Diffstat (limited to 'crypto')
-rw-r--r--  crypto/842.c  182
-rw-r--r--  crypto/Kconfig  723
-rw-r--r--  crypto/Makefile  54
-rw-r--r--  crypto/ablk_helper.c  150
-rw-r--r--  crypto/ablkcipher.c  379
-rw-r--r--  crypto/aead.c  92
-rw-r--r--  crypto/aes_generic.c  22
-rw-r--r--  crypto/af_alg.c  485
-rw-r--r--  crypto/ahash.c  462
-rw-r--r--  crypto/algapi.c  296
-rw-r--r--  crypto/algboss.c  72
-rw-r--r--  crypto/algif_hash.c  324
-rw-r--r--  crypto/algif_skcipher.c  635
-rw-r--r--  crypto/ansi_cprng.c  149
-rw-r--r--  crypto/anubis.c  23
-rw-r--r--  crypto/api.c  112
-rw-r--r--  crypto/arc4.c  124
-rw-r--r--  crypto/asymmetric_keys/.gitignore  1
-rw-r--r--  crypto/asymmetric_keys/Kconfig  40
-rw-r--r--  crypto/asymmetric_keys/Makefile  27
-rw-r--r--  crypto/asymmetric_keys/asymmetric_keys.h  15
-rw-r--r--  crypto/asymmetric_keys/asymmetric_type.c  275
-rw-r--r--  crypto/asymmetric_keys/public_key.c  128
-rw-r--r--  crypto/asymmetric_keys/public_key.h  36
-rw-r--r--  crypto/asymmetric_keys/rsa.c  278
-rw-r--r--  crypto/asymmetric_keys/signature.c  49
-rw-r--r--  crypto/asymmetric_keys/x509.asn1  60
-rw-r--r--  crypto/asymmetric_keys/x509_cert_parser.c  538
-rw-r--r--  crypto/asymmetric_keys/x509_parser.h  42
-rw-r--r--  crypto/asymmetric_keys/x509_public_key.c  218
-rw-r--r--  crypto/asymmetric_keys/x509_rsakey.asn1  4
-rw-r--r--  crypto/async_tx/Kconfig  13
-rw-r--r--  crypto/async_tx/Makefile  4
-rw-r--r--  crypto/async_tx/async_memcpy.c  86
-rw-r--r--  crypto/async_tx/async_memset.c  101
-rw-r--r--  crypto/async_tx/async_pq.c  441
-rw-r--r--  crypto/async_tx/async_raid6_recov.c  531
-rw-r--r--  crypto/async_tx/async_tx.c  141
-rw-r--r--  crypto/async_tx/async_xor.c  327
-rw-r--r--  crypto/async_tx/raid6test.c  253
-rw-r--r--  crypto/authenc.c  447
-rw-r--r--  crypto/authencesn.c  816
-rw-r--r--  crypto/blkcipher.c  131
-rw-r--r--  crypto/blowfish_common.c (renamed from crypto/blowfish.c)  116
-rw-r--r--  crypto/blowfish_generic.c  141
-rw-r--r--  crypto/camellia_generic.c (renamed from crypto/camellia.c)  759
-rw-r--r--  crypto/cast5_generic.c (renamed from crypto/cast5.c)  415
-rw-r--r--  crypto/cast6_generic.c  294
-rw-r--r--  crypto/cast_common.c (renamed from crypto/cast6.c)  285
-rw-r--r--  crypto/ccm.c  34
-rw-r--r--  crypto/chainiv.c  8
-rw-r--r--  crypto/cipher.c  2
-rw-r--r--  crypto/cmac.c  315
-rw-r--r--  crypto/compress.c  4
-rw-r--r--  crypto/crc32.c  158
-rw-r--r--  crypto/crc32c.c  260
-rw-r--r--  crypto/crc32c_generic.c  174
-rw-r--r--  crypto/crct10dif_common.c  82
-rw-r--r--  crypto/crct10dif_generic.c  127
-rw-r--r--  crypto/cryptd.c  746
-rw-r--r--  crypto/crypto_null.c  71
-rw-r--r--  crypto/crypto_user.c  539
-rw-r--r--  crypto/crypto_wq.c  40
-rw-r--r--  crypto/ctr.c  177
-rw-r--r--  crypto/cts.c  3
-rw-r--r--  crypto/deflate.c  32
-rw-r--r--  crypto/des_generic.c  158
-rw-r--r--  crypto/digest.c  240
-rw-r--r--  crypto/ecb.c  2
-rw-r--r--  crypto/eseqiv.c  21
-rw-r--r--  crypto/fcrypt.c  9
-rw-r--r--  crypto/gcm.c  997
-rw-r--r--  crypto/gf128mul.c  8
-rw-r--r--  crypto/ghash-generic.c  175
-rw-r--r--  crypto/hash.c  183
-rw-r--r--  crypto/hash_info.c  56
-rw-r--r--  crypto/hmac.c  303
-rw-r--r--  crypto/internal.h  47
-rw-r--r--  crypto/khazad.c  1
-rw-r--r--  crypto/krng.c  1
-rw-r--r--  crypto/lrw.c  156
-rw-r--r--  crypto/lz4.c  106
-rw-r--r--  crypto/lz4hc.c  106
-rw-r--r--  crypto/lzo.c  1
-rw-r--r--  crypto/md4.c  1
-rw-r--r--  crypto/md5.c  133
-rw-r--r--  crypto/memneq.c  168
-rw-r--r--  crypto/pcompress.c  120
-rw-r--r--  crypto/pcrypt.c  567
-rw-r--r--  crypto/proc.c  22
-rw-r--r--  crypto/rmd128.c  3
-rw-r--r--  crypto/rmd160.c  3
-rw-r--r--  crypto/rmd256.c  3
-rw-r--r--  crypto/rmd320.c  3
-rw-r--r--  crypto/rng.c  32
-rw-r--r--  crypto/salsa20_generic.c  1
-rw-r--r--  crypto/scatterwalk.c  32
-rw-r--r--  crypto/seed.c  1
-rw-r--r--  crypto/seqiv.c  4
-rw-r--r--  crypto/serpent.c  587
-rw-r--r--  crypto/serpent_generic.c  669
-rw-r--r--  crypto/sha1_generic.c  61
-rw-r--r--  crypto/sha256_generic.c  136
-rw-r--r--  crypto/sha512_generic.c  142
-rw-r--r--  crypto/shash.c  371
-rw-r--r--  crypto/tcrypt.c  1456
-rw-r--r--  crypto/tcrypt.h  43
-rw-r--r--  crypto/tea.c  41
-rw-r--r--  crypto/testmgr.c  1827
-rw-r--r--  crypto/testmgr.h  19984
-rw-r--r--  crypto/tgr192.c  38
-rw-r--r--  crypto/twofish_common.c  13
-rw-r--r--  crypto/twofish_generic.c (renamed from crypto/twofish.c)  2
-rw-r--r--  crypto/vmac.c  715
-rw-r--r--  crypto/wp512.c  57
-rw-r--r--  crypto/xcbc.c  371
-rw-r--r--  crypto/xor.c  32
-rw-r--r--  crypto/xts.c  83
-rw-r--r--  crypto/zlib.c  380
119 files changed, 39506 insertions, 5233 deletions
diff --git a/crypto/842.c b/crypto/842.c
new file mode 100644
index 00000000000..65c7a89cfa0
--- /dev/null
+++ b/crypto/842.c
@@ -0,0 +1,182 @@
+/*
+ * Cryptographic API for the 842 compression algorithm.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ *
+ * Copyright (C) IBM Corporation, 2011
+ *
+ * Authors: Robert Jennings <rcj@linux.vnet.ibm.com>
+ * Seth Jennings <sjenning@linux.vnet.ibm.com>
+ */
+
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/crypto.h>
+#include <linux/vmalloc.h>
+#include <linux/nx842.h>
+#include <linux/lzo.h>
+#include <linux/timer.h>
+
+static int nx842_uselzo;
+
+struct nx842_ctx {
+ void *nx842_wmem; /* working memory for 842/lzo */
+};
+
+enum nx842_crypto_type {
+ NX842_CRYPTO_TYPE_842,
+ NX842_CRYPTO_TYPE_LZO
+};
+
+#define NX842_SENTINEL 0xdeadbeef
+
+struct nx842_crypto_header {
+ unsigned int sentinel; /* debug */
+ enum nx842_crypto_type type;
+};
+
+static int nx842_init(struct crypto_tfm *tfm)
+{
+ struct nx842_ctx *ctx = crypto_tfm_ctx(tfm);
+ int wmemsize;
+
+ wmemsize = max_t(int, nx842_get_workmem_size(), LZO1X_MEM_COMPRESS);
+ ctx->nx842_wmem = kmalloc(wmemsize, GFP_NOFS);
+ if (!ctx->nx842_wmem)
+ return -ENOMEM;
+
+ return 0;
+}
+
+static void nx842_exit(struct crypto_tfm *tfm)
+{
+ struct nx842_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ kfree(ctx->nx842_wmem);
+}
+
+static void nx842_reset_uselzo(unsigned long data)
+{
+ nx842_uselzo = 0;
+}
+
+static DEFINE_TIMER(failover_timer, nx842_reset_uselzo, 0, 0);
+
+static int nx842_crypto_compress(struct crypto_tfm *tfm, const u8 *src,
+ unsigned int slen, u8 *dst, unsigned int *dlen)
+{
+ struct nx842_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct nx842_crypto_header *hdr;
+ unsigned int tmp_len = *dlen;
+ size_t lzodlen; /* needed for lzo */
+ int err;
+
+ *dlen = 0;
+ hdr = (struct nx842_crypto_header *)dst;
+ hdr->sentinel = NX842_SENTINEL; /* debug */
+ dst += sizeof(struct nx842_crypto_header);
+ tmp_len -= sizeof(struct nx842_crypto_header);
+ lzodlen = tmp_len;
+
+ if (likely(!nx842_uselzo)) {
+ err = nx842_compress(src, slen, dst, &tmp_len, ctx->nx842_wmem);
+
+ if (likely(!err)) {
+ hdr->type = NX842_CRYPTO_TYPE_842;
+ *dlen = tmp_len + sizeof(struct nx842_crypto_header);
+ return 0;
+ }
+
+ /* hardware failed */
+ nx842_uselzo = 1;
+
+ /* set timer to check for hardware again in 1 second */
+ mod_timer(&failover_timer, jiffies + msecs_to_jiffies(1000));
+ }
+
+ /* no hardware, use lzo */
+ err = lzo1x_1_compress(src, slen, dst, &lzodlen, ctx->nx842_wmem);
+ if (err != LZO_E_OK)
+ return -EINVAL;
+
+ hdr->type = NX842_CRYPTO_TYPE_LZO;
+ *dlen = lzodlen + sizeof(struct nx842_crypto_header);
+ return 0;
+}
+
+static int nx842_crypto_decompress(struct crypto_tfm *tfm, const u8 *src,
+ unsigned int slen, u8 *dst, unsigned int *dlen)
+{
+ struct nx842_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct nx842_crypto_header *hdr;
+ unsigned int tmp_len = *dlen;
+ size_t lzodlen; /* needed for lzo */
+ int err;
+
+ *dlen = 0;
+ hdr = (struct nx842_crypto_header *)src;
+
+ if (unlikely(hdr->sentinel != NX842_SENTINEL))
+ return -EINVAL;
+
+ src += sizeof(struct nx842_crypto_header);
+ slen -= sizeof(struct nx842_crypto_header);
+
+ if (likely(hdr->type == NX842_CRYPTO_TYPE_842)) {
+ err = nx842_decompress(src, slen, dst, &tmp_len,
+ ctx->nx842_wmem);
+ if (err)
+ return -EINVAL;
+ *dlen = tmp_len;
+ } else if (hdr->type == NX842_CRYPTO_TYPE_LZO) {
+ lzodlen = tmp_len;
+ err = lzo1x_decompress_safe(src, slen, dst, &lzodlen);
+ if (err != LZO_E_OK)
+ return -EINVAL;
+ *dlen = lzodlen;
+ } else
+ return -EINVAL;
+
+ return 0;
+}
+
+static struct crypto_alg alg = {
+ .cra_name = "842",
+ .cra_flags = CRYPTO_ALG_TYPE_COMPRESS,
+ .cra_ctxsize = sizeof(struct nx842_ctx),
+ .cra_module = THIS_MODULE,
+ .cra_init = nx842_init,
+ .cra_exit = nx842_exit,
+ .cra_u = { .compress = {
+ .coa_compress = nx842_crypto_compress,
+ .coa_decompress = nx842_crypto_decompress } }
+};
+
+static int __init nx842_mod_init(void)
+{
+ del_timer(&failover_timer);
+ return crypto_register_alg(&alg);
+}
+
+static void __exit nx842_mod_exit(void)
+{
+ crypto_unregister_alg(&alg);
+}
+
+module_init(nx842_mod_init);
+module_exit(nx842_mod_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("842 Compression Algorithm");
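
In-kernel users would normally reach this algorithm through the generic
compression API rather than calling the nx842 routines directly. A minimal
sketch of such a round trip follows; the example_842_roundtrip() name, the
includes and the caller-supplied buffer sizes are assumptions for
illustration only, not part of the patch.

#include <linux/crypto.h>
#include <linux/err.h>

/* Sketch: compress and then decompress a buffer with the "842" tfm. */
static int example_842_roundtrip(const u8 *src, unsigned int slen,
				 u8 *dst, unsigned int dmax,
				 u8 *out, unsigned int omax)
{
	struct crypto_comp *tfm;
	unsigned int dlen = dmax, olen = omax;
	int ret;

	tfm = crypto_alloc_comp("842", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_comp_compress(tfm, src, slen, dst, &dlen);
	if (!ret)
		ret = crypto_comp_decompress(tfm, dst, dlen, out, &olen);

	crypto_free_comp(tfm);
	return ret;
}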
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 8dde4fcf99c..ce4012a5878 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -23,6 +23,7 @@ comment "Crypto core or helper"
config CRYPTO_FIPS
bool "FIPS 200 compliance"
+ depends on CRYPTO_ANSI_CPRNG && !CRYPTO_MANAGER_DISABLE_TESTS
help
This option enables the fips boot option which is
required if you want the system to operate in a FIPS 200
@@ -56,6 +57,7 @@ config CRYPTO_BLKCIPHER2
tristate
select CRYPTO_ALGAPI2
select CRYPTO_RNG2
+ select CRYPTO_WORKQUEUE
config CRYPTO_HASH
tristate
@@ -75,6 +77,15 @@ config CRYPTO_RNG2
tristate
select CRYPTO_ALGAPI2
+config CRYPTO_PCOMP
+ tristate
+ select CRYPTO_PCOMP2
+ select CRYPTO_ALGAPI
+
+config CRYPTO_PCOMP2
+ tristate
+ select CRYPTO_ALGAPI2
+
config CRYPTO_MANAGER
tristate "Cryptographic algorithm manager"
select CRYPTO_MANAGER2
@@ -87,10 +98,26 @@ config CRYPTO_MANAGER2
select CRYPTO_AEAD2
select CRYPTO_HASH2
select CRYPTO_BLKCIPHER2
+ select CRYPTO_PCOMP2
+
+config CRYPTO_USER
+ tristate "Userspace cryptographic algorithm configuration"
+ depends on NET
+ select CRYPTO_MANAGER
+ help
+ Userspace configuration for cryptographic instantiations such as
+ cbc(aes).
+
+config CRYPTO_MANAGER_DISABLE_TESTS
+ bool "Disable run-time self tests"
+ default y
+ depends on CRYPTO_MANAGER2
+ help
+ Disable run-time self tests that normally take place at
+ algorithm registration.
config CRYPTO_GF128MUL
- tristate "GF(2^128) multiplication functions (EXPERIMENTAL)"
- depends on EXPERIMENTAL
+ tristate "GF(2^128) multiplication functions"
help
Efficient table driven implementation of multiplications in the
field GF(2^128). This is needed by some cypher modes. This
@@ -106,11 +133,25 @@ config CRYPTO_NULL
help
These are 'Null' algorithms, used by IPsec, which do nothing.
+config CRYPTO_PCRYPT
+ tristate "Parallel crypto engine"
+ depends on SMP
+ select PADATA
+ select CRYPTO_MANAGER
+ select CRYPTO_AEAD
+ help
+ This converts an arbitrary crypto algorithm into a parallel
+ algorithm that executes in kernel threads.
+
+config CRYPTO_WORKQUEUE
+ tristate
+
config CRYPTO_CRYPTD
tristate "Software async crypto daemon"
select CRYPTO_BLKCIPHER
select CRYPTO_HASH
select CRYPTO_MANAGER
+ select CRYPTO_WORKQUEUE
help
This is a generic software asynchronous crypto daemon that
converts an arbitrary synchronous software crypto algorithm
@@ -133,6 +174,15 @@ config CRYPTO_TEST
help
Quick & dirty crypto test module.
+config CRYPTO_ABLK_HELPER
+ tristate
+ select CRYPTO_CRYPTD
+
+config CRYPTO_GLUE_HELPER_X86
+ tristate
+ depends on X86
+ select CRYPTO_ALGAPI
+
comment "Authenticated Encryption with Associated Data"
config CRYPTO_CCM
@@ -146,7 +196,8 @@ config CRYPTO_GCM
tristate "GCM/GMAC support"
select CRYPTO_CTR
select CRYPTO_AEAD
- select CRYPTO_GF128MUL
+ select CRYPTO_GHASH
+ select CRYPTO_NULL
help
Support for Galois/Counter Mode (GCM) and Galois Message
Authentication Code (GMAC). Required for IPSec.
@@ -200,8 +251,7 @@ config CRYPTO_ECB
the input block by block.
config CRYPTO_LRW
- tristate "LRW support (EXPERIMENTAL)"
- depends on EXPERIMENTAL
+ tristate "LRW support"
select CRYPTO_BLKCIPHER
select CRYPTO_MANAGER
select CRYPTO_GF128MUL
@@ -221,8 +271,7 @@ config CRYPTO_PCBC
This block cipher algorithm is required for RxRPC.
config CRYPTO_XTS
- tristate "XTS support (EXPERIMENTAL)"
- depends on EXPERIMENTAL
+ tristate "XTS support"
select CRYPTO_BLKCIPHER
select CRYPTO_MANAGER
select CRYPTO_GF128MUL
@@ -233,6 +282,17 @@ config CRYPTO_XTS
comment "Hash modes"
+config CRYPTO_CMAC
+ tristate "CMAC support"
+ select CRYPTO_HASH
+ select CRYPTO_MANAGER
+ help
+ Cipher-based Message Authentication Code (CMAC) specified by
+ the National Institute of Standards and Technology (NIST).
+
+ https://tools.ietf.org/html/rfc4493
+ http://csrc.nist.gov/publications/nistpubs/800-38B/SP_800-38B.pdf
+
config CRYPTO_HMAC
tristate "HMAC support"
select CRYPTO_HASH
@@ -243,7 +303,6 @@ config CRYPTO_HMAC
config CRYPTO_XCBC
tristate "XCBC support"
- depends on EXPERIMENTAL
select CRYPTO_HASH
select CRYPTO_MANAGER
help
@@ -252,11 +311,23 @@ config CRYPTO_XCBC
http://csrc.nist.gov/encryption/modes/proposedmodes/
xcbc-mac/xcbc-mac-spec.pdf
+config CRYPTO_VMAC
+ tristate "VMAC support"
+ select CRYPTO_HASH
+ select CRYPTO_MANAGER
+ help
+ VMAC is a message authentication algorithm designed for
+ very high speed on 64-bit architectures.
+
+ See also:
+ <http://fastcrypto.org/vmac>
+
comment "Digest"
config CRYPTO_CRC32C
tristate "CRC32c CRC algorithm"
select CRYPTO_HASH
+ select CRC32
help
Castagnoli, et al Cyclic Redundancy-Check Algorithm. Used
by iSCSI for header and data digests and by others.
@@ -274,6 +345,61 @@ config CRYPTO_CRC32C_INTEL
gain performance compared with software implementation.
Module will be crc32c-intel.
+config CRYPTO_CRC32C_SPARC64
+ tristate "CRC32c CRC algorithm (SPARC64)"
+ depends on SPARC64
+ select CRYPTO_HASH
+ select CRC32
+ help
+ CRC32c CRC algorithm implemented using sparc64 crypto instructions,
+ when available.
+
+config CRYPTO_CRC32
+ tristate "CRC32 CRC algorithm"
+ select CRYPTO_HASH
+ select CRC32
+ help
+ CRC-32-IEEE 802.3 cyclic redundancy-check algorithm.
+ Shash crypto API wrapper around the crc32_le function.
+
+config CRYPTO_CRC32_PCLMUL
+ tristate "CRC32 PCLMULQDQ hardware acceleration"
+ depends on X86
+ select CRYPTO_HASH
+ select CRC32
+ help
+ On Intel Westmere, AMD Bulldozer and later processors that support
+ SSE4.2 and PCLMULQDQ, CRC32 can be computed with the hardware
+ accelerated PCLMULQDQ instruction. This option will create the
+ 'crc32-pclmul' module, which enables any routine to use the
+ CRC-32-IEEE 802.3 checksum with better performance than the table
+ implementation.
+
+config CRYPTO_CRCT10DIF
+ tristate "CRCT10DIF algorithm"
+ select CRYPTO_HASH
+ help
+ CRC T10 Data Integrity Field computation is being cast as
+ a crypto transform. This allows for faster CRC T10 DIF
+ transforms to be used if they are available.
+
+config CRYPTO_CRCT10DIF_PCLMUL
+ tristate "CRCT10DIF PCLMULQDQ hardware acceleration"
+ depends on X86 && 64BIT && CRC_T10DIF
+ select CRYPTO_HASH
+ help
+ For x86_64 processors that support SSE4.2 and PCLMULQDQ, the
+ CRC T10 DIF computation can be hardware accelerated with the
+ PCLMULQDQ instruction. This option will create the
+ 'crct10dif-pclmul' module, which is faster when computing the
+ crct10dif checksum than the generic table implementation.
+
+config CRYPTO_GHASH
+ tristate "GHASH digest algorithm"
+ select CRYPTO_GF128MUL
+ help
+ GHASH is the message digest algorithm used in GCM (Galois/Counter Mode).
+
config CRYPTO_MD4
tristate "MD4 digest algorithm"
select CRYPTO_HASH
@@ -286,6 +412,15 @@ config CRYPTO_MD5
help
MD5 message digest algorithm (RFC1321).
+config CRYPTO_MD5_SPARC64
+ tristate "MD5 digest algorithm (SPARC64)"
+ depends on SPARC64
+ select CRYPTO_MD5
+ select CRYPTO_HASH
+ help
+ MD5 message digest algorithm (RFC1321) implemented
+ using sparc64 crypto instructions, when available.
+
config CRYPTO_MICHAEL_MIC
tristate "Michael MIC keyed digest algorithm"
select CRYPTO_HASH
@@ -302,11 +437,11 @@ config CRYPTO_RMD128
RIPEMD-128 (ISO/IEC 10118-3:2004).
RIPEMD-128 is a 128-bit cryptographic hash function. It should only
- to be used as a secure replacement for RIPEMD. For other use cases
+ be used as a secure replacement for RIPEMD. For other use cases,
RIPEMD-160 should be used.
Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel.
- See <http://home.esat.kuleuven.be/~bosselae/ripemd160.html>
+ See <http://homes.esat.kuleuven.be/~bosselae/ripemd160.html>
config CRYPTO_RMD160
tristate "RIPEMD-160 digest algorithm"
@@ -323,7 +458,7 @@ config CRYPTO_RMD160
against RIPEMD-160.
Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel.
- See <http://home.esat.kuleuven.be/~bosselae/ripemd160.html>
+ See <http://homes.esat.kuleuven.be/~bosselae/ripemd160.html>
config CRYPTO_RMD256
tristate "RIPEMD-256 digest algorithm"
@@ -335,7 +470,7 @@ config CRYPTO_RMD256
(than RIPEMD-128).
Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel.
- See <http://home.esat.kuleuven.be/~bosselae/ripemd160.html>
+ See <http://homes.esat.kuleuven.be/~bosselae/ripemd160.html>
config CRYPTO_RMD320
tristate "RIPEMD-320 digest algorithm"
@@ -347,7 +482,7 @@ config CRYPTO_RMD320
(than RIPEMD-160).
Developed by Hans Dobbertin, Antoon Bosselaers and Bart Preneel.
- See <http://home.esat.kuleuven.be/~bosselae/ripemd160.html>
+ See <http://homes.esat.kuleuven.be/~bosselae/ripemd160.html>
config CRYPTO_SHA1
tristate "SHA1 digest algorithm"
@@ -355,6 +490,63 @@ config CRYPTO_SHA1
help
SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2).
+config CRYPTO_SHA1_SSSE3
+ tristate "SHA1 digest algorithm (SSSE3/AVX/AVX2)"
+ depends on X86 && 64BIT
+ select CRYPTO_SHA1
+ select CRYPTO_HASH
+ help
+ SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2) implemented
+ using Supplemental SSE3 (SSSE3) instructions or Advanced Vector
+ Extensions (AVX/AVX2), when available.
+
+config CRYPTO_SHA256_SSSE3
+ tristate "SHA256 digest algorithm (SSSE3/AVX/AVX2)"
+ depends on X86 && 64BIT
+ select CRYPTO_SHA256
+ select CRYPTO_HASH
+ help
+ SHA-256 secure hash standard (DFIPS 180-2) implemented
+ using Supplemental SSE3 (SSSE3) instructions, or Advanced Vector
+ Extensions version 1 (AVX1), or Advanced Vector Extensions
+ version 2 (AVX2) instructions, when available.
+
+config CRYPTO_SHA512_SSSE3
+ tristate "SHA512 digest algorithm (SSSE3/AVX/AVX2)"
+ depends on X86 && 64BIT
+ select CRYPTO_SHA512
+ select CRYPTO_HASH
+ help
+ SHA-512 secure hash standard (DFIPS 180-2) implemented
+ using Supplemental SSE3 (SSSE3) instructions, or Advanced Vector
+ Extensions version 1 (AVX1), or Advanced Vector Extensions
+ version 2 (AVX2) instructions, when available.
+
+config CRYPTO_SHA1_SPARC64
+ tristate "SHA1 digest algorithm (SPARC64)"
+ depends on SPARC64
+ select CRYPTO_SHA1
+ select CRYPTO_HASH
+ help
+ SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2) implemented
+ using sparc64 crypto instructions, when available.
+
+config CRYPTO_SHA1_ARM
+ tristate "SHA1 digest algorithm (ARM-asm)"
+ depends on ARM
+ select CRYPTO_SHA1
+ select CRYPTO_HASH
+ help
+ SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2) implemented
+ using optimized ARM assembler.
+
+config CRYPTO_SHA1_PPC
+ tristate "SHA1 digest algorithm (powerpc)"
+ depends on PPC
+ help
+ This is the powerpc hardware accelerated implementation of the
+ SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2).
+
config CRYPTO_SHA256
tristate "SHA224 and SHA256 digest algorithm"
select CRYPTO_HASH
@@ -367,6 +559,15 @@ config CRYPTO_SHA256
This code also includes SHA-224, a 224 bit hash with 112 bits
of security against collision attacks.
+config CRYPTO_SHA256_SPARC64
+ tristate "SHA224 and SHA256 digest algorithm (SPARC64)"
+ depends on SPARC64
+ select CRYPTO_SHA256
+ select CRYPTO_HASH
+ help
+ SHA-256 secure hash standard (DFIPS 180-2) implemented
+ using sparc64 crypto instructions, when available.
+
config CRYPTO_SHA512
tristate "SHA384 and SHA512 digest algorithms"
select CRYPTO_HASH
@@ -379,6 +580,15 @@ config CRYPTO_SHA512
This code also includes SHA-384, a 384 bit hash with 192 bits
of security against collision attacks.
+config CRYPTO_SHA512_SPARC64
+ tristate "SHA384 and SHA512 digest algorithm (SPARC64)"
+ depends on SPARC64
+ select CRYPTO_SHA512
+ select CRYPTO_HASH
+ help
+ SHA-512 secure hash standard (DFIPS 180-2) implemented
+ using sparc64 crypto instructions, when available.
+
config CRYPTO_TGR192
tristate "Tiger digest algorithms"
select CRYPTO_HASH
@@ -402,7 +612,15 @@ config CRYPTO_WP512
Whirlpool will be part of the ISO/IEC 10118-3:2003(E) standard
See also:
- <http://planeta.terra.com.br/informatica/paulobarreto/WhirlpoolPage.html>
+ <http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html>
+
+config CRYPTO_GHASH_CLMUL_NI_INTEL
+ tristate "GHASH digest algorithm (CLMUL-NI accelerated)"
+ depends on X86 && 64BIT
+ select CRYPTO_CRYPTD
+ help
+ GHASH is the message digest algorithm used in GCM (Galois/Counter Mode).
+ This implementation is accelerated by Intel's CLMUL-NI instructions.
comment "Ciphers"
@@ -470,6 +688,109 @@ config CRYPTO_AES_X86_64
See <http://csrc.nist.gov/encryption/aes/> for more information.
+config CRYPTO_AES_NI_INTEL
+ tristate "AES cipher algorithms (AES-NI)"
+ depends on X86
+ select CRYPTO_AES_X86_64 if 64BIT
+ select CRYPTO_AES_586 if !64BIT
+ select CRYPTO_CRYPTD
+ select CRYPTO_ABLK_HELPER
+ select CRYPTO_ALGAPI
+ select CRYPTO_GLUE_HELPER_X86 if 64BIT
+ select CRYPTO_LRW
+ select CRYPTO_XTS
+ help
+ Use Intel AES-NI instructions for AES algorithm.
+
+ AES cipher algorithms (FIPS-197). AES uses the Rijndael
+ algorithm.
+
+ Rijndael appears to be consistently a very good performer in
+ both hardware and software across a wide range of computing
+ environments regardless of its use in feedback or non-feedback
+ modes. Its key setup time is excellent, and its key agility is
+ good. Rijndael's very low memory requirements make it very well
+ suited for restricted-space environments, in which it also
+ demonstrates excellent performance. Rijndael's operations are
+ among the easiest to defend against power and timing attacks.
+
+ The AES specifies three key sizes: 128, 192 and 256 bits
+
+ See <http://csrc.nist.gov/encryption/aes/> for more information.
+
+ In addition to the AES cipher algorithm itself, acceleration
+ for some popular block cipher modes is supported too, including
+ ECB, CBC, LRW, PCBC and XTS. The 64-bit version has additional
+ acceleration for CTR.
+
+config CRYPTO_AES_SPARC64
+ tristate "AES cipher algorithms (SPARC64)"
+ depends on SPARC64
+ select CRYPTO_CRYPTD
+ select CRYPTO_ALGAPI
+ help
+ Use SPARC64 crypto opcodes for AES algorithm.
+
+ AES cipher algorithms (FIPS-197). AES uses the Rijndael
+ algorithm.
+
+ Rijndael appears to be consistently a very good performer in
+ both hardware and software across a wide range of computing
+ environments regardless of its use in feedback or non-feedback
+ modes. Its key setup time is excellent, and its key agility is
+ good. Rijndael's very low memory requirements make it very well
+ suited for restricted-space environments, in which it also
+ demonstrates excellent performance. Rijndael's operations are
+ among the easiest to defend against power and timing attacks.
+
+ The AES specifies three key sizes: 128, 192 and 256 bits
+
+ See <http://csrc.nist.gov/encryption/aes/> for more information.
+
+ In addition to the AES cipher algorithm itself, acceleration
+ for some popular block cipher modes is supported too, including
+ ECB and CBC.
+
+config CRYPTO_AES_ARM
+ tristate "AES cipher algorithms (ARM-asm)"
+ depends on ARM
+ select CRYPTO_ALGAPI
+ select CRYPTO_AES
+ help
+ Use optimized AES assembler routines for ARM platforms.
+
+ AES cipher algorithms (FIPS-197). AES uses the Rijndael
+ algorithm.
+
+ Rijndael appears to be consistently a very good performer in
+ both hardware and software across a wide range of computing
+ environments regardless of its use in feedback or non-feedback
+ modes. Its key setup time is excellent, and its key agility is
+ good. Rijndael's very low memory requirements make it very well
+ suited for restricted-space environments, in which it also
+ demonstrates excellent performance. Rijndael's operations are
+ among the easiest to defend against power and timing attacks.
+
+ The AES specifies three key sizes: 128, 192 and 256 bits
+
+ See <http://csrc.nist.gov/encryption/aes/> for more information.
+
+config CRYPTO_AES_ARM_BS
+ tristate "Bit sliced AES using NEON instructions"
+ depends on ARM && KERNEL_MODE_NEON
+ select CRYPTO_ALGAPI
+ select CRYPTO_AES_ARM
+ select CRYPTO_ABLK_HELPER
+ help
+ Use a faster and more secure NEON based implementation of AES in CBC,
+ CTR and XTS modes.
+
+ Bit sliced AES gives around 45% speedup on Cortex-A15 for CTR mode
+ and for XTS mode encryption; CBC and XTS mode decryption speedup is
+ around 25%. (CBC encryption speed is not affected by this driver.)
+ This implementation does not rely on any lookup tables so it is
+ believed to be invulnerable to cache timing attacks.
+
config CRYPTO_ANUBIS
tristate "Anubis cipher algorithm"
select CRYPTO_ALGAPI
@@ -481,12 +802,12 @@ config CRYPTO_ANUBIS
in the NESSIE competition.
See also:
- <https://www.cosic.esat.kuleuven.ac.be/nessie/reports/>
- <http://planeta.terra.com.br/informatica/paulobarreto/AnubisPage.html>
+ <https://www.cosic.esat.kuleuven.be/nessie/reports/>
+ <http://www.larc.usp.br/~pbarreto/AnubisPage.html>
config CRYPTO_ARC4
tristate "ARC4 cipher algorithm"
- select CRYPTO_ALGAPI
+ select CRYPTO_BLKCIPHER
help
ARC4 cipher algorithm.
@@ -498,6 +819,7 @@ config CRYPTO_ARC4
config CRYPTO_BLOWFISH
tristate "Blowfish cipher algorithm"
select CRYPTO_ALGAPI
+ select CRYPTO_BLOWFISH_COMMON
help
Blowfish cipher algorithm, by Bruce Schneier.
@@ -508,6 +830,30 @@ config CRYPTO_BLOWFISH
See also:
<http://www.schneier.com/blowfish.html>
+config CRYPTO_BLOWFISH_COMMON
+ tristate
+ help
+ Common parts of the Blowfish cipher algorithm shared by the
+ generic C and the assembler implementations.
+
+ See also:
+ <http://www.schneier.com/blowfish.html>
+
+config CRYPTO_BLOWFISH_X86_64
+ tristate "Blowfish cipher algorithm (x86_64)"
+ depends on X86 && 64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_BLOWFISH_COMMON
+ help
+ Blowfish cipher algorithm (x86_64), by Bruce Schneier.
+
+ This is a variable key length cipher which can use keys from 32
+ bits to 448 bits in length. It's fast, simple and specifically
+ designed for use on "large microprocessors".
+
+ See also:
+ <http://www.schneier.com/blowfish.html>
+
config CRYPTO_CAMELLIA
tristate "Camellia cipher algorithms"
depends on CRYPTO
@@ -523,26 +869,156 @@ config CRYPTO_CAMELLIA
See also:
<https://info.isl.ntt.co.jp/crypt/eng/camellia/index_s.html>
+config CRYPTO_CAMELLIA_X86_64
+ tristate "Camellia cipher algorithm (x86_64)"
+ depends on X86 && 64BIT
+ depends on CRYPTO
+ select CRYPTO_ALGAPI
+ select CRYPTO_GLUE_HELPER_X86
+ select CRYPTO_LRW
+ select CRYPTO_XTS
+ help
+ Camellia cipher algorithm module (x86_64).
+
+ Camellia is a symmetric key block cipher developed jointly
+ at NTT and Mitsubishi Electric Corporation.
+
+ The Camellia specifies three key sizes: 128, 192 and 256 bits.
+
+ See also:
+ <https://info.isl.ntt.co.jp/crypt/eng/camellia/index_s.html>
+
+config CRYPTO_CAMELLIA_AESNI_AVX_X86_64
+ tristate "Camellia cipher algorithm (x86_64/AES-NI/AVX)"
+ depends on X86 && 64BIT
+ depends on CRYPTO
+ select CRYPTO_ALGAPI
+ select CRYPTO_CRYPTD
+ select CRYPTO_ABLK_HELPER
+ select CRYPTO_GLUE_HELPER_X86
+ select CRYPTO_CAMELLIA_X86_64
+ select CRYPTO_LRW
+ select CRYPTO_XTS
+ help
+ Camellia cipher algorithm module (x86_64/AES-NI/AVX).
+
+ Camellia is a symmetric key block cipher developed jointly
+ at NTT and Mitsubishi Electric Corporation.
+
+ The Camellia specifies three key sizes: 128, 192 and 256 bits.
+
+ See also:
+ <https://info.isl.ntt.co.jp/crypt/eng/camellia/index_s.html>
+
+config CRYPTO_CAMELLIA_AESNI_AVX2_X86_64
+ tristate "Camellia cipher algorithm (x86_64/AES-NI/AVX2)"
+ depends on X86 && 64BIT
+ depends on CRYPTO
+ select CRYPTO_ALGAPI
+ select CRYPTO_CRYPTD
+ select CRYPTO_ABLK_HELPER
+ select CRYPTO_GLUE_HELPER_X86
+ select CRYPTO_CAMELLIA_X86_64
+ select CRYPTO_CAMELLIA_AESNI_AVX_X86_64
+ select CRYPTO_LRW
+ select CRYPTO_XTS
+ help
+ Camellia cipher algorithm module (x86_64/AES-NI/AVX2).
+
+ Camellia is a symmetric key block cipher developed jointly
+ at NTT and Mitsubishi Electric Corporation.
+
+ The Camellia specifies three key sizes: 128, 192 and 256 bits.
+
+ See also:
+ <https://info.isl.ntt.co.jp/crypt/eng/camellia/index_s.html>
+
+config CRYPTO_CAMELLIA_SPARC64
+ tristate "Camellia cipher algorithm (SPARC64)"
+ depends on SPARC64
+ depends on CRYPTO
+ select CRYPTO_ALGAPI
+ help
+ Camellia cipher algorithm module (SPARC64).
+
+ Camellia is a symmetric key block cipher developed jointly
+ at NTT and Mitsubishi Electric Corporation.
+
+ The Camellia specifies three key sizes: 128, 192 and 256 bits.
+
+ See also:
+ <https://info.isl.ntt.co.jp/crypt/eng/camellia/index_s.html>
+
+config CRYPTO_CAST_COMMON
+ tristate
+ help
+ Common parts of the CAST cipher algorithms shared by the
+ generic C and the assembler implementations.
+
config CRYPTO_CAST5
tristate "CAST5 (CAST-128) cipher algorithm"
select CRYPTO_ALGAPI
+ select CRYPTO_CAST_COMMON
+ help
+ The CAST5 encryption algorithm (synonymous with CAST-128) is
+ described in RFC2144.
+
+config CRYPTO_CAST5_AVX_X86_64
+ tristate "CAST5 (CAST-128) cipher algorithm (x86_64/AVX)"
+ depends on X86 && 64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_CRYPTD
+ select CRYPTO_ABLK_HELPER
+ select CRYPTO_CAST_COMMON
+ select CRYPTO_CAST5
help
The CAST5 encryption algorithm (synonymous with CAST-128) is
described in RFC2144.
+ This module provides the Cast5 cipher algorithm that processes
+ sixteen blocks in parallel using the AVX instruction set.
+
config CRYPTO_CAST6
tristate "CAST6 (CAST-256) cipher algorithm"
select CRYPTO_ALGAPI
+ select CRYPTO_CAST_COMMON
+ help
+ The CAST6 encryption algorithm (synonymous with CAST-256) is
+ described in RFC2612.
+
+config CRYPTO_CAST6_AVX_X86_64
+ tristate "CAST6 (CAST-256) cipher algorithm (x86_64/AVX)"
+ depends on X86 && 64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_CRYPTD
+ select CRYPTO_ABLK_HELPER
+ select CRYPTO_GLUE_HELPER_X86
+ select CRYPTO_CAST_COMMON
+ select CRYPTO_CAST6
+ select CRYPTO_LRW
+ select CRYPTO_XTS
help
The CAST6 encryption algorithm (synonymous with CAST-256) is
described in RFC2612.
+ This module provides the Cast6 cipher algorithm that processes
+ eight blocks in parallel using the AVX instruction set.
+
config CRYPTO_DES
tristate "DES and Triple DES EDE cipher algorithms"
select CRYPTO_ALGAPI
help
DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3).
+config CRYPTO_DES_SPARC64
+ tristate "DES and Triple DES EDE cipher algorithms (SPARC64)"
+ depends on SPARC64
+ select CRYPTO_ALGAPI
+ select CRYPTO_DES
+ help
+ DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3),
+ optimized using SPARC64 crypto opcodes.
+
config CRYPTO_FCRYPT
tristate "FCrypt cipher algorithm"
select CRYPTO_ALGAPI
@@ -561,11 +1037,10 @@ config CRYPTO_KHAZAD
on 32-bit processors. Khazad uses a 128-bit key size.
See also:
- <http://planeta.terra.com.br/informatica/paulobarreto/KhazadPage.html>
+ <http://www.larc.usp.br/~pbarreto/KhazadPage.html>
config CRYPTO_SALSA20
- tristate "Salsa20 stream cipher algorithm (EXPERIMENTAL)"
- depends on EXPERIMENTAL
+ tristate "Salsa20 stream cipher algorithm"
select CRYPTO_BLKCIPHER
help
Salsa20 stream cipher algorithm.
@@ -577,9 +1052,8 @@ config CRYPTO_SALSA20
Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html>
config CRYPTO_SALSA20_586
- tristate "Salsa20 stream cipher algorithm (i586) (EXPERIMENTAL)"
+ tristate "Salsa20 stream cipher algorithm (i586)"
depends on (X86 || UML_X86) && !64BIT
- depends on EXPERIMENTAL
select CRYPTO_BLKCIPHER
help
Salsa20 stream cipher algorithm.
@@ -591,9 +1065,8 @@ config CRYPTO_SALSA20_586
Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html>
config CRYPTO_SALSA20_X86_64
- tristate "Salsa20 stream cipher algorithm (x86_64) (EXPERIMENTAL)"
+ tristate "Salsa20 stream cipher algorithm (x86_64)"
depends on (X86 || UML_X86) && 64BIT
- depends on EXPERIMENTAL
select CRYPTO_BLKCIPHER
help
Salsa20 stream cipher algorithm.
@@ -631,6 +1104,95 @@ config CRYPTO_SERPENT
See also:
<http://www.cl.cam.ac.uk/~rja14/serpent.html>
+config CRYPTO_SERPENT_SSE2_X86_64
+ tristate "Serpent cipher algorithm (x86_64/SSE2)"
+ depends on X86 && 64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_CRYPTD
+ select CRYPTO_ABLK_HELPER
+ select CRYPTO_GLUE_HELPER_X86
+ select CRYPTO_SERPENT
+ select CRYPTO_LRW
+ select CRYPTO_XTS
+ help
+ Serpent cipher algorithm, by Anderson, Biham & Knudsen.
+
+ Keys are allowed to be from 0 to 256 bits in length, in steps
+ of 8 bits.
+
+ This module provides the Serpent cipher algorithm that processes eight
+ blocks in parallel using the SSE2 instruction set.
+
+ See also:
+ <http://www.cl.cam.ac.uk/~rja14/serpent.html>
+
+config CRYPTO_SERPENT_SSE2_586
+ tristate "Serpent cipher algorithm (i586/SSE2)"
+ depends on X86 && !64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_CRYPTD
+ select CRYPTO_ABLK_HELPER
+ select CRYPTO_GLUE_HELPER_X86
+ select CRYPTO_SERPENT
+ select CRYPTO_LRW
+ select CRYPTO_XTS
+ help
+ Serpent cipher algorithm, by Anderson, Biham & Knudsen.
+
+ Keys are allowed to be from 0 to 256 bits in length, in steps
+ of 8 bits.
+
+ This module provides the Serpent cipher algorithm that processes four
+ blocks in parallel using the SSE2 instruction set.
+
+ See also:
+ <http://www.cl.cam.ac.uk/~rja14/serpent.html>
+
+config CRYPTO_SERPENT_AVX_X86_64
+ tristate "Serpent cipher algorithm (x86_64/AVX)"
+ depends on X86 && 64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_CRYPTD
+ select CRYPTO_ABLK_HELPER
+ select CRYPTO_GLUE_HELPER_X86
+ select CRYPTO_SERPENT
+ select CRYPTO_LRW
+ select CRYPTO_XTS
+ help
+ Serpent cipher algorithm, by Anderson, Biham & Knudsen.
+
+ Keys are allowed to be from 0 to 256 bits in length, in steps
+ of 8 bits.
+
+ This module provides the Serpent cipher algorithm that processes
+ eight blocks in parallel using the AVX instruction set.
+
+ See also:
+ <http://www.cl.cam.ac.uk/~rja14/serpent.html>
+
+config CRYPTO_SERPENT_AVX2_X86_64
+ tristate "Serpent cipher algorithm (x86_64/AVX2)"
+ depends on X86 && 64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_CRYPTD
+ select CRYPTO_ABLK_HELPER
+ select CRYPTO_GLUE_HELPER_X86
+ select CRYPTO_SERPENT
+ select CRYPTO_SERPENT_AVX_X86_64
+ select CRYPTO_LRW
+ select CRYPTO_XTS
+ help
+ Serpent cipher algorithm, by Anderson, Biham & Knudsen.
+
+ Keys are allowed to be from 0 to 256 bits in length, in steps
+ of 8 bits.
+
+ This module provides the Serpent cipher algorithm that processes 16
+ blocks in parallel using the AVX2 instruction set.
+
+ See also:
+ <http://www.cl.cam.ac.uk/~rja14/serpent.html>
+
config CRYPTO_TEA
tristate "TEA, XTEA and XETA cipher algorithms"
select CRYPTO_ALGAPI
@@ -701,6 +1263,55 @@ config CRYPTO_TWOFISH_X86_64
See also:
<http://www.schneier.com/twofish.html>
+config CRYPTO_TWOFISH_X86_64_3WAY
+ tristate "Twofish cipher algorithm (x86_64, 3-way parallel)"
+ depends on X86 && 64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_TWOFISH_COMMON
+ select CRYPTO_TWOFISH_X86_64
+ select CRYPTO_GLUE_HELPER_X86
+ select CRYPTO_LRW
+ select CRYPTO_XTS
+ help
+ Twofish cipher algorithm (x86_64, 3-way parallel).
+
+ Twofish was submitted as an AES (Advanced Encryption Standard)
+ candidate cipher by researchers at CounterPane Systems. It is a
+ 16 round block cipher supporting key sizes of 128, 192, and 256
+ bits.
+
+ This module provides the Twofish cipher algorithm that processes three
+ blocks in parallel, making better use of out-of-order CPU resources.
+
+ See also:
+ <http://www.schneier.com/twofish.html>
+
+config CRYPTO_TWOFISH_AVX_X86_64
+ tristate "Twofish cipher algorithm (x86_64/AVX)"
+ depends on X86 && 64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_CRYPTD
+ select CRYPTO_ABLK_HELPER
+ select CRYPTO_GLUE_HELPER_X86
+ select CRYPTO_TWOFISH_COMMON
+ select CRYPTO_TWOFISH_X86_64
+ select CRYPTO_TWOFISH_X86_64_3WAY
+ select CRYPTO_LRW
+ select CRYPTO_XTS
+ help
+ Twofish cipher algorithm (x86_64/AVX).
+
+ Twofish was submitted as an AES (Advanced Encryption Standard)
+ candidate cipher by researchers at CounterPane Systems. It is a
+ 16 round block cipher supporting key sizes of 128, 192, and 256
+ bits.
+
+ This module provides the Twofish cipher algorithm that processes
+ eight blocks in parallel using the AVX instruction set.
+
+ See also:
+ <http://www.schneier.com/twofish.html>
+
comment "Compression"
config CRYPTO_DEFLATE
@@ -714,6 +1325,15 @@ config CRYPTO_DEFLATE
You will most probably want this if using IPSec.
+config CRYPTO_ZLIB
+ tristate "Zlib compression algorithm"
+ select CRYPTO_PCOMP
+ select ZLIB_INFLATE
+ select ZLIB_DEFLATE
+ select NLATTR
+ help
+ This is the zlib algorithm.
+
config CRYPTO_LZO
tristate "LZO compression algorithm"
select CRYPTO_ALGAPI
@@ -722,18 +1342,69 @@ config CRYPTO_LZO
help
This is the LZO algorithm.
+config CRYPTO_842
+ tristate "842 compression algorithm"
+ depends on CRYPTO_DEV_NX_COMPRESS
+ # 842 uses lzo if the hardware becomes unavailable
+ select LZO_COMPRESS
+ select LZO_DECOMPRESS
+ help
+ This is the 842 algorithm.
+
+config CRYPTO_LZ4
+ tristate "LZ4 compression algorithm"
+ select CRYPTO_ALGAPI
+ select LZ4_COMPRESS
+ select LZ4_DECOMPRESS
+ help
+ This is the LZ4 algorithm.
+
+config CRYPTO_LZ4HC
+ tristate "LZ4HC compression algorithm"
+ select CRYPTO_ALGAPI
+ select LZ4HC_COMPRESS
+ select LZ4_DECOMPRESS
+ help
+ This is the LZ4 high compression mode algorithm.
+
comment "Random Number Generation"
config CRYPTO_ANSI_CPRNG
tristate "Pseudo Random Number Generation for Cryptographic modules"
+ default m
select CRYPTO_AES
select CRYPTO_RNG
- select CRYPTO_FIPS
help
This option enables the generic pseudo random number generator
for cryptographic modules. It uses the algorithm specified in
- ANSI X9.31 A.2.4
+ ANSI X9.31 A.2.4. Note that this option must be enabled if
+ CRYPTO_FIPS is selected.
+
+config CRYPTO_USER_API
+ tristate
+
+config CRYPTO_USER_API_HASH
+ tristate "User-space interface for hash algorithms"
+ depends on NET
+ select CRYPTO_HASH
+ select CRYPTO_USER_API
+ help
+ This option enables the user-space interface for hash
+ algorithms.
+
+config CRYPTO_USER_API_SKCIPHER
+ tristate "User-space interface for symmetric key cipher algorithms"
+ depends on NET
+ select CRYPTO_BLKCIPHER
+ select CRYPTO_USER_API
+ help
+ This option enables the user-space interface for symmetric
+ key cipher algorithms.
+
+config CRYPTO_HASH_INFO
+ bool
source "drivers/crypto/Kconfig"
+source crypto/asymmetric_keys/Kconfig
endif # if CRYPTO
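
The new CRYPTO_USER_API_HASH and CRYPTO_USER_API_SKCIPHER options (backed by
af_alg.c, algif_hash.c and algif_skcipher.c in the diffstat above) expose
kernel algorithms to userspace over AF_ALG sockets. The following userspace
sketch of the hash interface is illustrative only: it assumes a kernel built
with these options and omits all error checking.

#include <unistd.h>
#include <sys/socket.h>
#include <linux/if_alg.h>

#ifndef AF_ALG
#define AF_ALG 38
#endif

int main(void)
{
	struct sockaddr_alg sa = {
		.salg_family = AF_ALG,
		.salg_type   = "hash",
		.salg_name   = "sha1",
	};
	unsigned char digest[20];
	int tfmfd, opfd;

	/* The first socket names the transform; accept() gives an operation fd. */
	tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);
	bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa));
	opfd = accept(tfmfd, NULL, 0);

	write(opfd, "abc", 3);			/* feed the message */
	read(opfd, digest, sizeof(digest));	/* read back the SHA-1 digest */

	close(opfd);
	close(tfmfd);
	return 0;
}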
diff --git a/crypto/Makefile b/crypto/Makefile
index 46b08bf2035..38e64231dcd 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -3,32 +3,38 @@
#
obj-$(CONFIG_CRYPTO) += crypto.o
-crypto-objs := api.o cipher.o digest.o compress.o
+crypto-y := api.o cipher.o compress.o memneq.o
+
+obj-$(CONFIG_CRYPTO_WORKQUEUE) += crypto_wq.o
obj-$(CONFIG_CRYPTO_FIPS) += fips.o
crypto_algapi-$(CONFIG_PROC_FS) += proc.o
-crypto_algapi-objs := algapi.o scatterwalk.o $(crypto_algapi-y)
+crypto_algapi-y := algapi.o scatterwalk.o $(crypto_algapi-y)
obj-$(CONFIG_CRYPTO_ALGAPI2) += crypto_algapi.o
obj-$(CONFIG_CRYPTO_AEAD2) += aead.o
-crypto_blkcipher-objs := ablkcipher.o
-crypto_blkcipher-objs += blkcipher.o
+crypto_blkcipher-y := ablkcipher.o
+crypto_blkcipher-y += blkcipher.o
obj-$(CONFIG_CRYPTO_BLKCIPHER2) += crypto_blkcipher.o
obj-$(CONFIG_CRYPTO_BLKCIPHER2) += chainiv.o
obj-$(CONFIG_CRYPTO_BLKCIPHER2) += eseqiv.o
obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
-crypto_hash-objs := hash.o
-crypto_hash-objs += ahash.o
-crypto_hash-objs += shash.o
+crypto_hash-y += ahash.o
+crypto_hash-y += shash.o
obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o
-cryptomgr-objs := algboss.o testmgr.o
+obj-$(CONFIG_CRYPTO_PCOMP2) += pcompress.o
+
+cryptomgr-y := algboss.o testmgr.o
obj-$(CONFIG_CRYPTO_MANAGER2) += cryptomgr.o
+obj-$(CONFIG_CRYPTO_USER) += crypto_user.o
+obj-$(CONFIG_CRYPTO_CMAC) += cmac.o
obj-$(CONFIG_CRYPTO_HMAC) += hmac.o
+obj-$(CONFIG_CRYPTO_VMAC) += vmac.o
obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o
obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o
obj-$(CONFIG_CRYPTO_MD4) += md4.o
@@ -52,17 +58,20 @@ obj-$(CONFIG_CRYPTO_XTS) += xts.o
obj-$(CONFIG_CRYPTO_CTR) += ctr.o
obj-$(CONFIG_CRYPTO_GCM) += gcm.o
obj-$(CONFIG_CRYPTO_CCM) += ccm.o
+obj-$(CONFIG_CRYPTO_PCRYPT) += pcrypt.o
obj-$(CONFIG_CRYPTO_CRYPTD) += cryptd.o
obj-$(CONFIG_CRYPTO_DES) += des_generic.o
obj-$(CONFIG_CRYPTO_FCRYPT) += fcrypt.o
-obj-$(CONFIG_CRYPTO_BLOWFISH) += blowfish.o
-obj-$(CONFIG_CRYPTO_TWOFISH) += twofish.o
+obj-$(CONFIG_CRYPTO_BLOWFISH) += blowfish_generic.o
+obj-$(CONFIG_CRYPTO_BLOWFISH_COMMON) += blowfish_common.o
+obj-$(CONFIG_CRYPTO_TWOFISH) += twofish_generic.o
obj-$(CONFIG_CRYPTO_TWOFISH_COMMON) += twofish_common.o
-obj-$(CONFIG_CRYPTO_SERPENT) += serpent.o
+obj-$(CONFIG_CRYPTO_SERPENT) += serpent_generic.o
obj-$(CONFIG_CRYPTO_AES) += aes_generic.o
-obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia.o
-obj-$(CONFIG_CRYPTO_CAST5) += cast5.o
-obj-$(CONFIG_CRYPTO_CAST6) += cast6.o
+obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia_generic.o
+obj-$(CONFIG_CRYPTO_CAST_COMMON) += cast_common.o
+obj-$(CONFIG_CRYPTO_CAST5) += cast5_generic.o
+obj-$(CONFIG_CRYPTO_CAST6) += cast6_generic.o
obj-$(CONFIG_CRYPTO_ARC4) += arc4.o
obj-$(CONFIG_CRYPTO_TEA) += tea.o
obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o
@@ -70,17 +79,30 @@ obj-$(CONFIG_CRYPTO_ANUBIS) += anubis.o
obj-$(CONFIG_CRYPTO_SEED) += seed.o
obj-$(CONFIG_CRYPTO_SALSA20) += salsa20_generic.o
obj-$(CONFIG_CRYPTO_DEFLATE) += deflate.o
+obj-$(CONFIG_CRYPTO_ZLIB) += zlib.o
obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o
-obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o
-obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o
+obj-$(CONFIG_CRYPTO_CRC32C) += crc32c_generic.o
+obj-$(CONFIG_CRYPTO_CRC32) += crc32.o
+obj-$(CONFIG_CRYPTO_CRCT10DIF) += crct10dif_common.o crct10dif_generic.o
+obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o authencesn.o
obj-$(CONFIG_CRYPTO_LZO) += lzo.o
+obj-$(CONFIG_CRYPTO_LZ4) += lz4.o
+obj-$(CONFIG_CRYPTO_LZ4HC) += lz4hc.o
+obj-$(CONFIG_CRYPTO_842) += 842.o
obj-$(CONFIG_CRYPTO_RNG2) += rng.o
obj-$(CONFIG_CRYPTO_RNG2) += krng.o
obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o
obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o
+obj-$(CONFIG_CRYPTO_GHASH) += ghash-generic.o
+obj-$(CONFIG_CRYPTO_USER_API) += af_alg.o
+obj-$(CONFIG_CRYPTO_USER_API_HASH) += algif_hash.o
+obj-$(CONFIG_CRYPTO_USER_API_SKCIPHER) += algif_skcipher.o
#
# generic algorithms and the async_tx api
#
obj-$(CONFIG_XOR_BLOCKS) += xor.o
obj-$(CONFIG_ASYNC_CORE) += async_tx/
+obj-$(CONFIG_ASYMMETRIC_KEY_TYPE) += asymmetric_keys/
+obj-$(CONFIG_CRYPTO_HASH_INFO) += hash_info.o
+obj-$(CONFIG_CRYPTO_ABLK_HELPER) += ablk_helper.o
diff --git a/crypto/ablk_helper.c b/crypto/ablk_helper.c
new file mode 100644
index 00000000000..ffe7278d4bd
--- /dev/null
+++ b/crypto/ablk_helper.c
@@ -0,0 +1,150 @@
+/*
+ * Shared async block cipher helpers
+ *
+ * Copyright (c) 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
+ *
+ * Based on aesni-intel_glue.c by:
+ * Copyright (C) 2008, Intel Corp.
+ * Author: Huang Ying <ying.huang@intel.com>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
+ * USA
+ *
+ */
+
+#include <linux/kernel.h>
+#include <linux/crypto.h>
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/hardirq.h>
+#include <crypto/algapi.h>
+#include <crypto/cryptd.h>
+#include <crypto/ablk_helper.h>
+#include <asm/simd.h>
+
+int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
+ unsigned int key_len)
+{
+ struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+ struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
+ int err;
+
+ crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+ crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
+ & CRYPTO_TFM_REQ_MASK);
+ err = crypto_ablkcipher_setkey(child, key, key_len);
+ crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
+ & CRYPTO_TFM_RES_MASK);
+ return err;
+}
+EXPORT_SYMBOL_GPL(ablk_set_key);
+
+int __ablk_encrypt(struct ablkcipher_request *req)
+{
+ struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
+ struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+ struct blkcipher_desc desc;
+
+ desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
+ desc.info = req->info;
+ desc.flags = 0;
+
+ return crypto_blkcipher_crt(desc.tfm)->encrypt(
+ &desc, req->dst, req->src, req->nbytes);
+}
+EXPORT_SYMBOL_GPL(__ablk_encrypt);
+
+int ablk_encrypt(struct ablkcipher_request *req)
+{
+ struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
+ struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+
+ if (!may_use_simd()) {
+ struct ablkcipher_request *cryptd_req =
+ ablkcipher_request_ctx(req);
+
+ *cryptd_req = *req;
+ ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
+
+ return crypto_ablkcipher_encrypt(cryptd_req);
+ } else {
+ return __ablk_encrypt(req);
+ }
+}
+EXPORT_SYMBOL_GPL(ablk_encrypt);
+
+int ablk_decrypt(struct ablkcipher_request *req)
+{
+ struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
+ struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+
+ if (!may_use_simd()) {
+ struct ablkcipher_request *cryptd_req =
+ ablkcipher_request_ctx(req);
+
+ *cryptd_req = *req;
+ ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
+
+ return crypto_ablkcipher_decrypt(cryptd_req);
+ } else {
+ struct blkcipher_desc desc;
+
+ desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
+ desc.info = req->info;
+ desc.flags = 0;
+
+ return crypto_blkcipher_crt(desc.tfm)->decrypt(
+ &desc, req->dst, req->src, req->nbytes);
+ }
+}
+EXPORT_SYMBOL_GPL(ablk_decrypt);
+
+void ablk_exit(struct crypto_tfm *tfm)
+{
+ struct async_helper_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ cryptd_free_ablkcipher(ctx->cryptd_tfm);
+}
+EXPORT_SYMBOL_GPL(ablk_exit);
+
+int ablk_init_common(struct crypto_tfm *tfm, const char *drv_name)
+{
+ struct async_helper_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct cryptd_ablkcipher *cryptd_tfm;
+
+ cryptd_tfm = cryptd_alloc_ablkcipher(drv_name, 0, 0);
+ if (IS_ERR(cryptd_tfm))
+ return PTR_ERR(cryptd_tfm);
+
+ ctx->cryptd_tfm = cryptd_tfm;
+ tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
+ crypto_ablkcipher_reqsize(&cryptd_tfm->base);
+
+ return 0;
+}
+EXPORT_SYMBOL_GPL(ablk_init_common);
+
+int ablk_init(struct crypto_tfm *tfm)
+{
+ char drv_name[CRYPTO_MAX_ALG_NAME];
+
+ snprintf(drv_name, sizeof(drv_name), "__driver-%s",
+ crypto_tfm_alg_driver_name(tfm));
+
+ return ablk_init_common(tfm, drv_name);
+}
+EXPORT_SYMBOL_GPL(ablk_init);
+
+MODULE_LICENSE("GPL");
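
These helpers are meant to be wired into arch-specific glue modules the same
way aesni-intel_glue.c uses them: the algorithm's ablkcipher entry points are
set to ablk_set_key()/ablk_encrypt()/ablk_decrypt(), while ablk_init() and
ablk_exit() manage the cryptd-backed fallback transform. Below is a
condensed, hypothetical registration showing that wiring; the driver name,
priority and AES constants are placeholders for whatever the glue module
really implements.

#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/ablk_helper.h>
#include <crypto/aes.h>

static struct crypto_alg example_ablk_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-example",	/* placeholder */
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,	/* allocates the cryptd fallback */
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
};

The glue module would then hand this to crypto_register_alg() from its init
function, much as the existing x86 glue code does.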
diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c
index 94140b3756f..40886c48990 100644
--- a/crypto/ablkcipher.c
+++ b/crypto/ablkcipher.c
@@ -1,6 +1,6 @@
/*
* Asynchronous block chaining cipher operations.
- *
+ *
* This is the asynchronous version of blkcipher.c indicating completion
* via a callback.
*
@@ -8,23 +8,300 @@
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
+ * Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
#include <crypto/internal/skcipher.h>
+#include <linux/cpumask.h>
#include <linux/err.h>
-#include <linux/init.h>
#include <linux/kernel.h>
-#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
+
+#include <crypto/scatterwalk.h>
#include "internal.h"
+struct ablkcipher_buffer {
+ struct list_head entry;
+ struct scatter_walk dst;
+ unsigned int len;
+ void *data;
+};
+
+enum {
+ ABLKCIPHER_WALK_SLOW = 1 << 0,
+};
+
+static inline void ablkcipher_buffer_write(struct ablkcipher_buffer *p)
+{
+ scatterwalk_copychunks(p->data, &p->dst, p->len, 1);
+}
+
+void __ablkcipher_walk_complete(struct ablkcipher_walk *walk)
+{
+ struct ablkcipher_buffer *p, *tmp;
+
+ list_for_each_entry_safe(p, tmp, &walk->buffers, entry) {
+ ablkcipher_buffer_write(p);
+ list_del(&p->entry);
+ kfree(p);
+ }
+}
+EXPORT_SYMBOL_GPL(__ablkcipher_walk_complete);
+
+static inline void ablkcipher_queue_write(struct ablkcipher_walk *walk,
+ struct ablkcipher_buffer *p)
+{
+ p->dst = walk->out;
+ list_add_tail(&p->entry, &walk->buffers);
+}
+
+/* Get a spot of the specified length that does not straddle a page.
+ * The caller needs to ensure that there is enough space for this operation.
+ */
+static inline u8 *ablkcipher_get_spot(u8 *start, unsigned int len)
+{
+ u8 *end_page = (u8 *)(((unsigned long)(start + len - 1)) & PAGE_MASK);
+ return max(start, end_page);
+}
+
+static inline unsigned int ablkcipher_done_slow(struct ablkcipher_walk *walk,
+ unsigned int bsize)
+{
+ unsigned int n = bsize;
+
+ for (;;) {
+ unsigned int len_this_page = scatterwalk_pagelen(&walk->out);
+
+ if (len_this_page > n)
+ len_this_page = n;
+ scatterwalk_advance(&walk->out, n);
+ if (n == len_this_page)
+ break;
+ n -= len_this_page;
+ scatterwalk_start(&walk->out, scatterwalk_sg_next(walk->out.sg));
+ }
+
+ return bsize;
+}
+
+static inline unsigned int ablkcipher_done_fast(struct ablkcipher_walk *walk,
+ unsigned int n)
+{
+ scatterwalk_advance(&walk->in, n);
+ scatterwalk_advance(&walk->out, n);
+
+ return n;
+}
+
+static int ablkcipher_walk_next(struct ablkcipher_request *req,
+ struct ablkcipher_walk *walk);
+
+int ablkcipher_walk_done(struct ablkcipher_request *req,
+ struct ablkcipher_walk *walk, int err)
+{
+ struct crypto_tfm *tfm = req->base.tfm;
+ unsigned int nbytes = 0;
+
+ if (likely(err >= 0)) {
+ unsigned int n = walk->nbytes - err;
+
+ if (likely(!(walk->flags & ABLKCIPHER_WALK_SLOW)))
+ n = ablkcipher_done_fast(walk, n);
+ else if (WARN_ON(err)) {
+ err = -EINVAL;
+ goto err;
+ } else
+ n = ablkcipher_done_slow(walk, n);
+
+ nbytes = walk->total - n;
+ err = 0;
+ }
+
+ scatterwalk_done(&walk->in, 0, nbytes);
+ scatterwalk_done(&walk->out, 1, nbytes);
+
+err:
+ walk->total = nbytes;
+ walk->nbytes = nbytes;
+
+ if (nbytes) {
+ crypto_yield(req->base.flags);
+ return ablkcipher_walk_next(req, walk);
+ }
+
+ if (walk->iv != req->info)
+ memcpy(req->info, walk->iv, tfm->crt_ablkcipher.ivsize);
+ kfree(walk->iv_buffer);
+
+ return err;
+}
+EXPORT_SYMBOL_GPL(ablkcipher_walk_done);
+
+static inline int ablkcipher_next_slow(struct ablkcipher_request *req,
+ struct ablkcipher_walk *walk,
+ unsigned int bsize,
+ unsigned int alignmask,
+ void **src_p, void **dst_p)
+{
+ unsigned aligned_bsize = ALIGN(bsize, alignmask + 1);
+ struct ablkcipher_buffer *p;
+ void *src, *dst, *base;
+ unsigned int n;
+
+ n = ALIGN(sizeof(struct ablkcipher_buffer), alignmask + 1);
+ n += (aligned_bsize * 3 - (alignmask + 1) +
+ (alignmask & ~(crypto_tfm_ctx_alignment() - 1)));
+
+ p = kmalloc(n, GFP_ATOMIC);
+ if (!p)
+ return ablkcipher_walk_done(req, walk, -ENOMEM);
+
+ base = p + 1;
+
+ dst = (u8 *)ALIGN((unsigned long)base, alignmask + 1);
+ src = dst = ablkcipher_get_spot(dst, bsize);
+
+ p->len = bsize;
+ p->data = dst;
+
+ scatterwalk_copychunks(src, &walk->in, bsize, 0);
+
+ ablkcipher_queue_write(walk, p);
+
+ walk->nbytes = bsize;
+ walk->flags |= ABLKCIPHER_WALK_SLOW;
+
+ *src_p = src;
+ *dst_p = dst;
+
+ return 0;
+}
+
+static inline int ablkcipher_copy_iv(struct ablkcipher_walk *walk,
+ struct crypto_tfm *tfm,
+ unsigned int alignmask)
+{
+ unsigned bs = walk->blocksize;
+ unsigned int ivsize = tfm->crt_ablkcipher.ivsize;
+ unsigned aligned_bs = ALIGN(bs, alignmask + 1);
+ unsigned int size = aligned_bs * 2 + ivsize + max(aligned_bs, ivsize) -
+ (alignmask + 1);
+ u8 *iv;
+
+ size += alignmask & ~(crypto_tfm_ctx_alignment() - 1);
+ walk->iv_buffer = kmalloc(size, GFP_ATOMIC);
+ if (!walk->iv_buffer)
+ return -ENOMEM;
+
+ iv = (u8 *)ALIGN((unsigned long)walk->iv_buffer, alignmask + 1);
+ iv = ablkcipher_get_spot(iv, bs) + aligned_bs;
+ iv = ablkcipher_get_spot(iv, bs) + aligned_bs;
+ iv = ablkcipher_get_spot(iv, ivsize);
+
+ walk->iv = memcpy(iv, walk->iv, ivsize);
+ return 0;
+}
+
+static inline int ablkcipher_next_fast(struct ablkcipher_request *req,
+ struct ablkcipher_walk *walk)
+{
+ walk->src.page = scatterwalk_page(&walk->in);
+ walk->src.offset = offset_in_page(walk->in.offset);
+ walk->dst.page = scatterwalk_page(&walk->out);
+ walk->dst.offset = offset_in_page(walk->out.offset);
+
+ return 0;
+}
+
+static int ablkcipher_walk_next(struct ablkcipher_request *req,
+ struct ablkcipher_walk *walk)
+{
+ struct crypto_tfm *tfm = req->base.tfm;
+ unsigned int alignmask, bsize, n;
+ void *src, *dst;
+ int err;
+
+ alignmask = crypto_tfm_alg_alignmask(tfm);
+ n = walk->total;
+ if (unlikely(n < crypto_tfm_alg_blocksize(tfm))) {
+ req->base.flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
+ return ablkcipher_walk_done(req, walk, -EINVAL);
+ }
+
+ walk->flags &= ~ABLKCIPHER_WALK_SLOW;
+ src = dst = NULL;
+
+ bsize = min(walk->blocksize, n);
+ n = scatterwalk_clamp(&walk->in, n);
+ n = scatterwalk_clamp(&walk->out, n);
+
+ if (n < bsize ||
+ !scatterwalk_aligned(&walk->in, alignmask) ||
+ !scatterwalk_aligned(&walk->out, alignmask)) {
+ err = ablkcipher_next_slow(req, walk, bsize, alignmask,
+ &src, &dst);
+ goto set_phys_lowmem;
+ }
+
+ walk->nbytes = n;
+
+ return ablkcipher_next_fast(req, walk);
+
+set_phys_lowmem:
+ if (err >= 0) {
+ walk->src.page = virt_to_page(src);
+ walk->dst.page = virt_to_page(dst);
+ walk->src.offset = ((unsigned long)src & (PAGE_SIZE - 1));
+ walk->dst.offset = ((unsigned long)dst & (PAGE_SIZE - 1));
+ }
+
+ return err;
+}
+
+static int ablkcipher_walk_first(struct ablkcipher_request *req,
+ struct ablkcipher_walk *walk)
+{
+ struct crypto_tfm *tfm = req->base.tfm;
+ unsigned int alignmask;
+
+ alignmask = crypto_tfm_alg_alignmask(tfm);
+ if (WARN_ON_ONCE(in_irq()))
+ return -EDEADLK;
+
+ walk->nbytes = walk->total;
+ if (unlikely(!walk->total))
+ return 0;
+
+ walk->iv_buffer = NULL;
+ walk->iv = req->info;
+ if (unlikely(((unsigned long)walk->iv & alignmask))) {
+ int err = ablkcipher_copy_iv(walk, tfm, alignmask);
+ if (err)
+ return err;
+ }
+
+ scatterwalk_start(&walk->in, walk->in.sg);
+ scatterwalk_start(&walk->out, walk->out.sg);
+
+ return ablkcipher_walk_next(req, walk);
+}
+
+int ablkcipher_walk_phys(struct ablkcipher_request *req,
+ struct ablkcipher_walk *walk)
+{
+ walk->blocksize = crypto_tfm_alg_blocksize(req->base.tfm);
+ return ablkcipher_walk_first(req, walk);
+}
+EXPORT_SYMBOL_GPL(ablkcipher_walk_phys);
+
static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key,
unsigned int keylen)
{
@@ -102,6 +379,35 @@ static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type,
return 0;
}
+#ifdef CONFIG_NET
+static int crypto_ablkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_blkcipher rblkcipher;
+
+ strncpy(rblkcipher.type, "ablkcipher", sizeof(rblkcipher.type));
+ strncpy(rblkcipher.geniv, alg->cra_ablkcipher.geniv ?: "<default>",
+ sizeof(rblkcipher.geniv));
+
+ rblkcipher.blocksize = alg->cra_blocksize;
+ rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
+ rblkcipher.max_keysize = alg->cra_ablkcipher.max_keysize;
+ rblkcipher.ivsize = alg->cra_ablkcipher.ivsize;
+
+ if (nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
+ sizeof(struct crypto_report_blkcipher), &rblkcipher))
+ goto nla_put_failure;
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_ablkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg)
@@ -124,6 +430,7 @@ const struct crypto_type crypto_ablkcipher_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_ablkcipher_show,
#endif
+ .report = crypto_ablkcipher_report,
};
EXPORT_SYMBOL_GPL(crypto_ablkcipher_type);
@@ -153,6 +460,35 @@ static int crypto_init_givcipher_ops(struct crypto_tfm *tfm, u32 type,
return 0;
}
+#ifdef CONFIG_NET
+static int crypto_givcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_blkcipher rblkcipher;
+
+ strncpy(rblkcipher.type, "givcipher", sizeof(rblkcipher.type));
+ strncpy(rblkcipher.geniv, alg->cra_ablkcipher.geniv ?: "<built-in>",
+ sizeof(rblkcipher.geniv));
+
+ rblkcipher.blocksize = alg->cra_blocksize;
+ rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
+ rblkcipher.max_keysize = alg->cra_ablkcipher.max_keysize;
+ rblkcipher.ivsize = alg->cra_ablkcipher.ivsize;
+
+ if (nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
+ sizeof(struct crypto_report_blkcipher), &rblkcipher))
+ goto nla_put_failure;
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_givcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg)
@@ -175,12 +511,19 @@ const struct crypto_type crypto_givcipher_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_givcipher_show,
#endif
+ .report = crypto_givcipher_report,
};
EXPORT_SYMBOL_GPL(crypto_givcipher_type);
const char *crypto_default_geniv(const struct crypto_alg *alg)
{
- return alg->cra_flags & CRYPTO_ALG_ASYNC ? "eseqiv" : "chainiv";
+ if (((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+ CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
+ alg->cra_ablkcipher.ivsize) !=
+ alg->cra_blocksize)
+ return "chainiv";
+
+ return "eseqiv";
}
static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
@@ -201,8 +544,9 @@ static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
int err;
larval = crypto_larval_lookup(alg->cra_driver_name,
+ (type & ~CRYPTO_ALG_TYPE_MASK) |
CRYPTO_ALG_TYPE_GIVCIPHER,
- CRYPTO_ALG_TYPE_MASK);
+ mask | CRYPTO_ALG_TYPE_MASK);
err = PTR_ERR(larval);
if (IS_ERR(larval))
goto out;
@@ -264,8 +608,7 @@ out:
return err;
}
-static struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type,
- u32 mask)
+struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type, u32 mask)
{
struct crypto_alg *alg;
@@ -282,8 +625,28 @@ static struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type,
alg->cra_ablkcipher.ivsize))
return alg;
+ crypto_mod_put(alg);
+ alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED,
+ mask & ~CRYPTO_ALG_TESTED);
+ if (IS_ERR(alg))
+ return alg;
+
+ if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+ CRYPTO_ALG_TYPE_GIVCIPHER) {
+ if ((alg->cra_flags ^ type ^ ~mask) & CRYPTO_ALG_TESTED) {
+ crypto_mod_put(alg);
+ alg = ERR_PTR(-ENOENT);
+ }
+ return alg;
+ }
+
+ BUG_ON(!((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+ CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
+ alg->cra_ablkcipher.ivsize));
+
return ERR_PTR(crypto_givcipher_default(alg, type, mask));
}
+EXPORT_SYMBOL_GPL(crypto_lookup_skcipher);
int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
u32 type, u32 mask)
diff --git a/crypto/aead.c b/crypto/aead.c
index 3a6f3f52c7c..547491e35c6 100644
--- a/crypto/aead.c
+++ b/crypto/aead.c
@@ -1,13 +1,13 @@
/*
* AEAD: Authenticated Encryption with Associated Data
- *
+ *
* This file provides API support for AEAD algorithms.
*
* Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
+ * Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
@@ -18,8 +18,11 @@
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
+#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
#include "internal.h"
@@ -108,6 +111,34 @@ static int crypto_init_aead_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
return 0;
}
+#ifdef CONFIG_NET
+static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_aead raead;
+ struct aead_alg *aead = &alg->cra_aead;
+
+ strncpy(raead.type, "aead", sizeof(raead.type));
+ strncpy(raead.geniv, aead->geniv ?: "<built-in>", sizeof(raead.geniv));
+
+ raead.blocksize = alg->cra_blocksize;
+ raead.maxauthsize = aead->maxauthsize;
+ raead.ivsize = aead->ivsize;
+
+ if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
+ sizeof(struct crypto_report_aead), &raead))
+ goto nla_put_failure;
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
@@ -129,6 +160,7 @@ const struct crypto_type crypto_aead_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_aead_show,
#endif
+ .report = crypto_aead_report,
};
EXPORT_SYMBOL_GPL(crypto_aead_type);
@@ -164,6 +196,35 @@ static int crypto_init_nivaead_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
return 0;
}
+#ifdef CONFIG_NET
+static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_aead raead;
+ struct aead_alg *aead = &alg->cra_aead;
+
+ strncpy(raead.type, "nivaead", sizeof(raead.type));
+ strncpy(raead.geniv, aead->geniv, sizeof(raead.geniv));
+
+ raead.blocksize = alg->cra_blocksize;
+ raead.maxauthsize = aead->maxauthsize;
+ raead.ivsize = aead->ivsize;
+
+ if (nla_put(skb, CRYPTOCFGA_REPORT_AEAD,
+ sizeof(struct crypto_report_aead), &raead))
+ goto nla_put_failure;
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
@@ -185,6 +246,7 @@ const struct crypto_type crypto_nivaead_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_nivaead_show,
#endif
+ .report = crypto_nivaead_report,
};
EXPORT_SYMBOL_GPL(crypto_nivaead_type);
@@ -219,18 +281,16 @@ struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
int err;
algt = crypto_get_attr_type(tb);
- err = PTR_ERR(algt);
if (IS_ERR(algt))
- return ERR_PTR(err);
+ return ERR_CAST(algt);
if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) &
algt->mask)
return ERR_PTR(-EINVAL);
name = crypto_attr_alg_name(tb[1]);
- err = PTR_ERR(name);
if (IS_ERR(name))
- return ERR_PTR(err);
+ return ERR_CAST(name);
inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
if (!inst)
@@ -407,8 +467,7 @@ out:
return err;
}
-static struct crypto_alg *crypto_lookup_aead(const char *name, u32 type,
- u32 mask)
+struct crypto_alg *crypto_lookup_aead(const char *name, u32 type, u32 mask)
{
struct crypto_alg *alg;
@@ -422,8 +481,25 @@ static struct crypto_alg *crypto_lookup_aead(const char *name, u32 type,
if (!alg->cra_aead.ivsize)
return alg;
+ crypto_mod_put(alg);
+ alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED,
+ mask & ~CRYPTO_ALG_TESTED);
+ if (IS_ERR(alg))
+ return alg;
+
+ if (alg->cra_type == &crypto_aead_type) {
+ if ((alg->cra_flags ^ type ^ ~mask) & CRYPTO_ALG_TESTED) {
+ crypto_mod_put(alg);
+ alg = ERR_PTR(-ENOENT);
+ }
+ return alg;
+ }
+
+ BUG_ON(!alg->cra_aead.ivsize);
+
return ERR_PTR(crypto_nivaead_default(alg, type, mask));
}
+EXPORT_SYMBOL_GPL(crypto_lookup_aead);
int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
u32 type, u32 mask)
diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c
index b8b66ec3883..fd0d6b45497 100644
--- a/crypto/aes_generic.c
+++ b/crypto/aes_generic.c
@@ -1,4 +1,4 @@
-/*
+/*
* Cryptographic API.
*
* AES Cipher Algorithm.
@@ -62,7 +62,7 @@ static inline u8 byte(const u32 x, const unsigned n)
static const u32 rco_tab[10] = { 1, 2, 4, 8, 16, 32, 64, 128, 27, 54 };
-const u32 crypto_ft_tab[4][256] = {
+__visible const u32 crypto_ft_tab[4][256] = {
{
0xa56363c6, 0x847c7cf8, 0x997777ee, 0x8d7b7bf6,
0x0df2f2ff, 0xbd6b6bd6, 0xb16f6fde, 0x54c5c591,
@@ -326,7 +326,7 @@ const u32 crypto_ft_tab[4][256] = {
}
};
-const u32 crypto_fl_tab[4][256] = {
+__visible const u32 crypto_fl_tab[4][256] = {
{
0x00000063, 0x0000007c, 0x00000077, 0x0000007b,
0x000000f2, 0x0000006b, 0x0000006f, 0x000000c5,
@@ -590,7 +590,7 @@ const u32 crypto_fl_tab[4][256] = {
}
};
-const u32 crypto_it_tab[4][256] = {
+__visible const u32 crypto_it_tab[4][256] = {
{
0x50a7f451, 0x5365417e, 0xc3a4171a, 0x965e273a,
0xcb6bab3b, 0xf1459d1f, 0xab58faac, 0x9303e34b,
@@ -854,7 +854,7 @@ const u32 crypto_it_tab[4][256] = {
}
};
-const u32 crypto_il_tab[4][256] = {
+__visible const u32 crypto_il_tab[4][256] = {
{
0x00000052, 0x00000009, 0x0000006a, 0x000000d5,
0x00000030, 0x00000036, 0x000000a5, 0x00000038,
@@ -1127,7 +1127,7 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
#define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b)
-#define imix_col(y,x) do { \
+#define imix_col(y, x) do { \
u = star_x(x); \
v = star_x(u); \
w = star_x(v); \
@@ -1174,7 +1174,7 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
ctx->key_enc[6 * i + 11] = t; \
} while (0)
-#define loop8(i) do { \
+#define loop8tophalf(i) do { \
t = ror32(t, 8); \
t = ls_box(t) ^ rco_tab[i]; \
t ^= ctx->key_enc[8 * i]; \
@@ -1185,6 +1185,10 @@ EXPORT_SYMBOL_GPL(crypto_il_tab);
ctx->key_enc[8 * i + 10] = t; \
t ^= ctx->key_enc[8 * i + 3]; \
ctx->key_enc[8 * i + 11] = t; \
+} while (0)
+
+#define loop8(i) do { \
+ loop8tophalf(i); \
t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \
ctx->key_enc[8 * i + 12] = t; \
t ^= ctx->key_enc[8 * i + 5]; \
@@ -1245,8 +1249,9 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
ctx->key_enc[5] = le32_to_cpu(key[5]);
ctx->key_enc[6] = le32_to_cpu(key[6]);
t = ctx->key_enc[7] = le32_to_cpu(key[7]);
- for (i = 0; i < 7; ++i)
+ for (i = 0; i < 6; ++i)
loop8(i);
+ loop8tophalf(i);
break;
}
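/*
 * The key-schedule arithmetic behind the hunk above, as a sanity check:
 * AES-256 needs 4 * (14 + 1) = 60 expanded words.  Eight come straight
 * from the key, each full loop8() emits eight more (6 * 8 = 48, total
 * 56), and the trailing loop8tophalf() emits the final four, giving
 * exactly 60.  The previous loop ran loop8() seven times and generated
 * 64 words, writing past the end of the 60-word key_enc[] array.
 */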
@@ -1443,7 +1448,6 @@ static struct crypto_alg aes_alg = {
.cra_ctxsize = sizeof(struct crypto_aes_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
.cra_u = {
.cipher = {
.cia_min_keysize = AES_MIN_KEY_SIZE,
diff --git a/crypto/af_alg.c b/crypto/af_alg.c
new file mode 100644
index 00000000000..6a3ad801158
--- /dev/null
+++ b/crypto/af_alg.c
@@ -0,0 +1,485 @@
+/*
+ * af_alg: User-space algorithm interface
+ *
+ * This file provides the user-space API for algorithms.
+ *
+ * Copyright (c) 2010 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <linux/atomic.h>
+#include <crypto/if_alg.h>
+#include <linux/crypto.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/list.h>
+#include <linux/module.h>
+#include <linux/net.h>
+#include <linux/rwsem.h>
+#include <linux/security.h>
+
+struct alg_type_list {
+ const struct af_alg_type *type;
+ struct list_head list;
+};
+
+static atomic_long_t alg_memory_allocated;
+
+static struct proto alg_proto = {
+ .name = "ALG",
+ .owner = THIS_MODULE,
+ .memory_allocated = &alg_memory_allocated,
+ .obj_size = sizeof(struct alg_sock),
+};
+
+static LIST_HEAD(alg_types);
+static DECLARE_RWSEM(alg_types_sem);
+
+static const struct af_alg_type *alg_get_type(const char *name)
+{
+ const struct af_alg_type *type = ERR_PTR(-ENOENT);
+ struct alg_type_list *node;
+
+ down_read(&alg_types_sem);
+ list_for_each_entry(node, &alg_types, list) {
+ if (strcmp(node->type->name, name))
+ continue;
+
+ if (try_module_get(node->type->owner))
+ type = node->type;
+ break;
+ }
+ up_read(&alg_types_sem);
+
+ return type;
+}
+
+int af_alg_register_type(const struct af_alg_type *type)
+{
+ struct alg_type_list *node;
+ int err = -EEXIST;
+
+ down_write(&alg_types_sem);
+ list_for_each_entry(node, &alg_types, list) {
+ if (!strcmp(node->type->name, type->name))
+ goto unlock;
+ }
+
+ node = kmalloc(sizeof(*node), GFP_KERNEL);
+ err = -ENOMEM;
+ if (!node)
+ goto unlock;
+
+ type->ops->owner = THIS_MODULE;
+ node->type = type;
+ list_add(&node->list, &alg_types);
+ err = 0;
+
+unlock:
+ up_write(&alg_types_sem);
+
+ return err;
+}
+EXPORT_SYMBOL_GPL(af_alg_register_type);
+
+int af_alg_unregister_type(const struct af_alg_type *type)
+{
+ struct alg_type_list *node;
+ int err = -ENOENT;
+
+ down_write(&alg_types_sem);
+ list_for_each_entry(node, &alg_types, list) {
+ if (strcmp(node->type->name, type->name))
+ continue;
+
+ list_del(&node->list);
+ kfree(node);
+ err = 0;
+ break;
+ }
+ up_write(&alg_types_sem);
+
+ return err;
+}
+EXPORT_SYMBOL_GPL(af_alg_unregister_type);
+
+static void alg_do_release(const struct af_alg_type *type, void *private)
+{
+ if (!type)
+ return;
+
+ type->release(private);
+ module_put(type->owner);
+}
+
+int af_alg_release(struct socket *sock)
+{
+ if (sock->sk)
+ sock_put(sock->sk);
+ return 0;
+}
+EXPORT_SYMBOL_GPL(af_alg_release);
+
+static int alg_bind(struct socket *sock, struct sockaddr *uaddr, int addr_len)
+{
+ struct sock *sk = sock->sk;
+ struct alg_sock *ask = alg_sk(sk);
+ struct sockaddr_alg *sa = (void *)uaddr;
+ const struct af_alg_type *type;
+ void *private;
+
+ if (sock->state == SS_CONNECTED)
+ return -EINVAL;
+
+ if (addr_len != sizeof(*sa))
+ return -EINVAL;
+
+ sa->salg_type[sizeof(sa->salg_type) - 1] = 0;
+ sa->salg_name[sizeof(sa->salg_name) - 1] = 0;
+
+ type = alg_get_type(sa->salg_type);
+ if (IS_ERR(type) && PTR_ERR(type) == -ENOENT) {
+ request_module("algif-%s", sa->salg_type);
+ type = alg_get_type(sa->salg_type);
+ }
+
+ if (IS_ERR(type))
+ return PTR_ERR(type);
+
+ private = type->bind(sa->salg_name, sa->salg_feat, sa->salg_mask);
+ if (IS_ERR(private)) {
+ module_put(type->owner);
+ return PTR_ERR(private);
+ }
+
+ lock_sock(sk);
+
+ swap(ask->type, type);
+ swap(ask->private, private);
+
+ release_sock(sk);
+
+ alg_do_release(type, private);
+
+ return 0;
+}
+
+static int alg_setkey(struct sock *sk, char __user *ukey,
+ unsigned int keylen)
+{
+ struct alg_sock *ask = alg_sk(sk);
+ const struct af_alg_type *type = ask->type;
+ u8 *key;
+ int err;
+
+ key = sock_kmalloc(sk, keylen, GFP_KERNEL);
+ if (!key)
+ return -ENOMEM;
+
+ err = -EFAULT;
+ if (copy_from_user(key, ukey, keylen))
+ goto out;
+
+ err = type->setkey(ask->private, key, keylen);
+
+out:
+ sock_kfree_s(sk, key, keylen);
+
+ return err;
+}
+
+static int alg_setsockopt(struct socket *sock, int level, int optname,
+ char __user *optval, unsigned int optlen)
+{
+ struct sock *sk = sock->sk;
+ struct alg_sock *ask = alg_sk(sk);
+ const struct af_alg_type *type;
+ int err = -ENOPROTOOPT;
+
+ lock_sock(sk);
+ type = ask->type;
+
+ if (level != SOL_ALG || !type)
+ goto unlock;
+
+ switch (optname) {
+ case ALG_SET_KEY:
+ if (sock->state == SS_CONNECTED)
+ goto unlock;
+ if (!type->setkey)
+ goto unlock;
+
+ err = alg_setkey(sk, optval, optlen);
+ }
+
+unlock:
+ release_sock(sk);
+
+ return err;
+}
+
+int af_alg_accept(struct sock *sk, struct socket *newsock)
+{
+ struct alg_sock *ask = alg_sk(sk);
+ const struct af_alg_type *type;
+ struct sock *sk2;
+ int err;
+
+ lock_sock(sk);
+ type = ask->type;
+
+ err = -EINVAL;
+ if (!type)
+ goto unlock;
+
+ sk2 = sk_alloc(sock_net(sk), PF_ALG, GFP_KERNEL, &alg_proto);
+ err = -ENOMEM;
+ if (!sk2)
+ goto unlock;
+
+ sock_init_data(newsock, sk2);
+ sock_graft(sk2, newsock);
+ security_sk_clone(sk, sk2);
+
+ err = type->accept(ask->private, sk2);
+ if (err) {
+ sk_free(sk2);
+ goto unlock;
+ }
+
+ sk2->sk_family = PF_ALG;
+
+ sock_hold(sk);
+ alg_sk(sk2)->parent = sk;
+ alg_sk(sk2)->type = type;
+
+ newsock->ops = type->ops;
+ newsock->state = SS_CONNECTED;
+
+ err = 0;
+
+unlock:
+ release_sock(sk);
+
+ return err;
+}
+EXPORT_SYMBOL_GPL(af_alg_accept);
+
+static int alg_accept(struct socket *sock, struct socket *newsock, int flags)
+{
+ return af_alg_accept(sock->sk, newsock);
+}
+
+static const struct proto_ops alg_proto_ops = {
+ .family = PF_ALG,
+ .owner = THIS_MODULE,
+
+ .connect = sock_no_connect,
+ .socketpair = sock_no_socketpair,
+ .getname = sock_no_getname,
+ .ioctl = sock_no_ioctl,
+ .listen = sock_no_listen,
+ .shutdown = sock_no_shutdown,
+ .getsockopt = sock_no_getsockopt,
+ .mmap = sock_no_mmap,
+ .sendpage = sock_no_sendpage,
+ .sendmsg = sock_no_sendmsg,
+ .recvmsg = sock_no_recvmsg,
+ .poll = sock_no_poll,
+
+ .bind = alg_bind,
+ .release = af_alg_release,
+ .setsockopt = alg_setsockopt,
+ .accept = alg_accept,
+};
+
+static void alg_sock_destruct(struct sock *sk)
+{
+ struct alg_sock *ask = alg_sk(sk);
+
+ alg_do_release(ask->type, ask->private);
+}
+
+static int alg_create(struct net *net, struct socket *sock, int protocol,
+ int kern)
+{
+ struct sock *sk;
+ int err;
+
+ if (sock->type != SOCK_SEQPACKET)
+ return -ESOCKTNOSUPPORT;
+ if (protocol != 0)
+ return -EPROTONOSUPPORT;
+
+ err = -ENOMEM;
+ sk = sk_alloc(net, PF_ALG, GFP_KERNEL, &alg_proto);
+ if (!sk)
+ goto out;
+
+ sock->ops = &alg_proto_ops;
+ sock_init_data(sock, sk);
+
+ sk->sk_family = PF_ALG;
+ sk->sk_destruct = alg_sock_destruct;
+
+ return 0;
+out:
+ return err;
+}
+
+static const struct net_proto_family alg_family = {
+ .family = PF_ALG,
+ .create = alg_create,
+ .owner = THIS_MODULE,
+};
+
+int af_alg_make_sg(struct af_alg_sgl *sgl, void __user *addr, int len,
+ int write)
+{
+ unsigned long from = (unsigned long)addr;
+ unsigned long npages;
+ unsigned off;
+ int err;
+ int i;
+
+ err = -EFAULT;
+ if (!access_ok(write ? VERIFY_READ : VERIFY_WRITE, addr, len))
+ goto out;
+
+ off = from & ~PAGE_MASK;
+ npages = (off + len + PAGE_SIZE - 1) >> PAGE_SHIFT;
+ if (npages > ALG_MAX_PAGES)
+ npages = ALG_MAX_PAGES;
+
+ err = get_user_pages_fast(from, npages, write, sgl->pages);
+ if (err < 0)
+ goto out;
+
+ npages = err;
+ err = -EINVAL;
+ if (WARN_ON(npages == 0))
+ goto out;
+
+ err = 0;
+
+ sg_init_table(sgl->sg, npages);
+
+ for (i = 0; i < npages; i++) {
+ int plen = min_t(int, len, PAGE_SIZE - off);
+
+ sg_set_page(sgl->sg + i, sgl->pages[i], plen, off);
+
+ off = 0;
+ len -= plen;
+ err += plen;
+ }
+
+out:
+ return err;
+}
+EXPORT_SYMBOL_GPL(af_alg_make_sg);
+
+void af_alg_free_sg(struct af_alg_sgl *sgl)
+{
+ int i;
+
+ i = 0;
+ do {
+ put_page(sgl->pages[i]);
+ } while (!sg_is_last(sgl->sg + (i++)));
+}
+EXPORT_SYMBOL_GPL(af_alg_free_sg);
+
+int af_alg_cmsg_send(struct msghdr *msg, struct af_alg_control *con)
+{
+ struct cmsghdr *cmsg;
+
+ for (cmsg = CMSG_FIRSTHDR(msg); cmsg; cmsg = CMSG_NXTHDR(msg, cmsg)) {
+ if (!CMSG_OK(msg, cmsg))
+ return -EINVAL;
+ if (cmsg->cmsg_level != SOL_ALG)
+ continue;
+
+ switch (cmsg->cmsg_type) {
+ case ALG_SET_IV:
+ if (cmsg->cmsg_len < CMSG_LEN(sizeof(*con->iv)))
+ return -EINVAL;
+ con->iv = (void *)CMSG_DATA(cmsg);
+ if (cmsg->cmsg_len < CMSG_LEN(con->iv->ivlen +
+ sizeof(*con->iv)))
+ return -EINVAL;
+ break;
+
+ case ALG_SET_OP:
+ if (cmsg->cmsg_len < CMSG_LEN(sizeof(u32)))
+ return -EINVAL;
+ con->op = *(u32 *)CMSG_DATA(cmsg);
+ break;
+
+ default:
+ return -EINVAL;
+ }
+ }
+
+ return 0;
+}
+EXPORT_SYMBOL_GPL(af_alg_cmsg_send);
+
+int af_alg_wait_for_completion(int err, struct af_alg_completion *completion)
+{
+ switch (err) {
+ case -EINPROGRESS:
+ case -EBUSY:
+ wait_for_completion(&completion->completion);
+ reinit_completion(&completion->completion);
+ err = completion->err;
+ break;
+ }
+
+ return err;
+}
+EXPORT_SYMBOL_GPL(af_alg_wait_for_completion);
+
+void af_alg_complete(struct crypto_async_request *req, int err)
+{
+ struct af_alg_completion *completion = req->data;
+
+ completion->err = err;
+ complete(&completion->completion);
+}
+EXPORT_SYMBOL_GPL(af_alg_complete);
+
+static int __init af_alg_init(void)
+{
+ int err = proto_register(&alg_proto, 0);
+
+ if (err)
+ goto out;
+
+ err = sock_register(&alg_family);
+ if (err != 0)
+ goto out_unregister_proto;
+
+out:
+ return err;
+
+out_unregister_proto:
+ proto_unregister(&alg_proto);
+ goto out;
+}
+
+static void __exit af_alg_exit(void)
+{
+ sock_unregister(PF_ALG);
+ proto_unregister(&alg_proto);
+}
+
+module_init(af_alg_init);
+module_exit(af_alg_exit);
+MODULE_LICENSE("GPL");
+MODULE_ALIAS_NETPROTO(AF_ALG);
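/*
 * Illustrative user-space sketch of the socket flow implemented above.
 * struct sockaddr_alg comes from <linux/if_alg.h>; the "hash"/"sha256"
 * binding and the 32-byte digest are assumptions for the example, and
 * error handling is omitted.
 */
#include <linux/if_alg.h>
#include <sys/socket.h>
#include <unistd.h>

int example_sha256(const void *data, size_t len, unsigned char digest[32])
{
	struct sockaddr_alg sa = {
		.salg_family = AF_ALG,
		.salg_type   = "hash",		/* served by algif_hash */
		.salg_name   = "sha256",
	};
	int tfmfd, opfd;

	tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);		/* alg_create()    */
	bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa));	/* alg_bind()      */
	opfd = accept(tfmfd, NULL, NULL);			/* af_alg_accept() */

	send(opfd, data, len, 0);
	read(opfd, digest, 32);

	close(opfd);
	close(tfmfd);
	return 0;
}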
diff --git a/crypto/ahash.c b/crypto/ahash.c
index b2d1ee32cfe..f2a5d8f656f 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -15,15 +15,31 @@
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
+#include <linux/bug.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
#include "internal.h"
+struct ahash_request_priv {
+ crypto_completion_t complete;
+ void *data;
+ u8 *result;
+ void *ubuf[] CRYPTO_MINALIGN_ATTR;
+};
+
+static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
+{
+ return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
+ halg);
+}
+
static int hash_walk_next(struct crypto_hash_walk *walk)
{
unsigned int alignmask = walk->alignmask;
@@ -31,11 +47,17 @@ static int hash_walk_next(struct crypto_hash_walk *walk)
unsigned int nbytes = min(walk->entrylen,
((unsigned int)(PAGE_SIZE)) - offset);
- walk->data = crypto_kmap(walk->pg, 0);
+ if (walk->flags & CRYPTO_ALG_ASYNC)
+ walk->data = kmap(walk->pg);
+ else
+ walk->data = kmap_atomic(walk->pg);
walk->data += offset;
- if (offset & alignmask)
- nbytes = alignmask + 1 - (offset & alignmask);
+ if (offset & alignmask) {
+ unsigned int unaligned = alignmask + 1 - (offset & alignmask);
+ if (nbytes > unaligned)
+ nbytes = unaligned;
+ }
walk->entrylen -= nbytes;
return nbytes;
@@ -65,7 +87,6 @@ int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
walk->data -= walk->offset;
if (nbytes && walk->offset & alignmask && !err) {
- walk->offset += alignmask - 1;
walk->offset = ALIGN(walk->offset, alignmask + 1);
walk->data += walk->offset;
@@ -76,16 +97,25 @@ int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
return nbytes;
}
- crypto_kunmap(walk->data, 0);
- crypto_yield(walk->flags);
+ if (walk->flags & CRYPTO_ALG_ASYNC)
+ kunmap(walk->pg);
+ else {
+ kunmap_atomic(walk->data);
+ /*
+ * The "may sleep" test only makes sense for sync users.
+ * Async users don't need to sleep here anyway.
+ */
+ crypto_yield(walk->flags);
+ }
if (err)
return err;
- walk->offset = 0;
-
- if (nbytes)
+ if (nbytes) {
+ walk->offset = 0;
+ walk->pg++;
return hash_walk_next(walk);
+ }
if (!walk->total)
return 0;
@@ -106,12 +136,31 @@ int crypto_hash_walk_first(struct ahash_request *req,
walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
walk->sg = req->src;
- walk->flags = req->base.flags;
+ walk->flags = req->base.flags & CRYPTO_TFM_REQ_MASK;
return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);
+int crypto_ahash_walk_first(struct ahash_request *req,
+ struct crypto_hash_walk *walk)
+{
+ walk->total = req->nbytes;
+
+ if (!walk->total)
+ return 0;
+
+ walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
+ walk->sg = req->src;
+ walk->flags = req->base.flags & CRYPTO_TFM_REQ_MASK;
+ walk->flags |= CRYPTO_ALG_ASYNC;
+
+ BUILD_BUG_ON(CRYPTO_TFM_REQ_MASK & CRYPTO_ALG_ASYNC);
+
+ return hash_walk_new_entry(walk);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_walk_first);
+
int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
struct crypto_hash_walk *walk,
struct scatterlist *sg, unsigned int len)
@@ -123,7 +172,7 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
walk->alignmask = crypto_hash_alignmask(hdesc->tfm);
walk->sg = sg;
- walk->flags = hdesc->flags;
+ walk->flags = hdesc->flags & CRYPTO_TFM_REQ_MASK;
return hash_walk_new_entry(walk);
}
@@ -131,36 +180,34 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
unsigned int keylen)
{
- struct ahash_alg *ahash = crypto_ahash_alg(tfm);
unsigned long alignmask = crypto_ahash_alignmask(tfm);
int ret;
u8 *buffer, *alignbuffer;
unsigned long absize;
absize = keylen + alignmask;
- buffer = kmalloc(absize, GFP_ATOMIC);
+ buffer = kmalloc(absize, GFP_KERNEL);
if (!buffer)
return -ENOMEM;
alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
memcpy(alignbuffer, key, keylen);
- ret = ahash->setkey(tfm, alignbuffer, keylen);
- memset(alignbuffer, 0, keylen);
- kfree(buffer);
+ ret = tfm->setkey(tfm, alignbuffer, keylen);
+ kzfree(buffer);
return ret;
}
-static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
+int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
unsigned int keylen)
{
- struct ahash_alg *ahash = crypto_ahash_alg(tfm);
unsigned long alignmask = crypto_ahash_alignmask(tfm);
if ((unsigned long)key & alignmask)
return ahash_setkey_unaligned(tfm, key, keylen);
- return ahash->setkey(tfm, key, keylen);
+ return tfm->setkey(tfm, key, keylen);
}
+EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
unsigned int keylen)
@@ -168,43 +215,289 @@ static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
return -ENOSYS;
}
-int crypto_ahash_import(struct ahash_request *req, const u8 *in)
+static inline unsigned int ahash_align_buffer_size(unsigned len,
+ unsigned long mask)
+{
+ return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));
+}
+
+static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt)
{
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
- struct ahash_alg *alg = crypto_ahash_alg(tfm);
+ unsigned long alignmask = crypto_ahash_alignmask(tfm);
+ unsigned int ds = crypto_ahash_digestsize(tfm);
+ struct ahash_request_priv *priv;
- memcpy(ahash_request_ctx(req), in, crypto_ahash_reqsize(tfm));
+ priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
+ (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
+ GFP_KERNEL : GFP_ATOMIC);
+ if (!priv)
+ return -ENOMEM;
- if (alg->reinit)
- alg->reinit(req);
+ /*
+ * WARNING: Voodoo programming below!
+ *
+ * The code below is obscure and hard to understand, thus explanation
+ * is necessary. See include/crypto/hash.h and include/linux/crypto.h
+ * to understand the layout of structures used here!
+ *
+ * The code here will replace portions of the ORIGINAL request with
+ * pointers to new code and buffers so the hashing operation can store
+ * the result in aligned buffer. We will call the modified request
+ * an ADJUSTED request.
+ *
+ * The newly mangled request will look as such:
+ *
+ * req {
+ * .result = ADJUSTED[new aligned buffer]
+ * .base.complete = ADJUSTED[pointer to completion function]
+ * .base.data = ADJUSTED[*req (pointer to self)]
+ * .priv = ADJUSTED[new priv] {
+ * .result = ORIGINAL(result)
+ * .complete = ORIGINAL(base.complete)
+ * .data = ORIGINAL(base.data)
+ * }
+ * }
+ */
+
+ priv->result = req->result;
+ priv->complete = req->base.complete;
+ priv->data = req->base.data;
+ /*
+ * WARNING: We do not back up req->priv here! The req->priv
+ * is for internal use of the Crypto API and the
+ * user must _NOT_ _EVER_ depend on its contents!
+ */
+
+ req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
+ req->base.complete = cplt;
+ req->base.data = req;
+ req->priv = priv;
return 0;
}
-EXPORT_SYMBOL_GPL(crypto_ahash_import);
-static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type,
- u32 mask)
+static void ahash_restore_req(struct ahash_request *req)
+{
+ struct ahash_request_priv *priv = req->priv;
+
+ /* Restore the original crypto request. */
+ req->result = priv->result;
+ req->base.complete = priv->complete;
+ req->base.data = priv->data;
+ req->priv = NULL;
+
+ /* Free the priv struct that was attached to the ADJUSTED request. */
+ kzfree(priv);
+}
+
+static void ahash_op_unaligned_finish(struct ahash_request *req, int err)
+{
+ struct ahash_request_priv *priv = req->priv;
+
+ if (err == -EINPROGRESS)
+ return;
+
+ if (!err)
+ memcpy(priv->result, req->result,
+ crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
+
+ ahash_restore_req(req);
+}
+
+static void ahash_op_unaligned_done(struct crypto_async_request *req, int err)
{
- return alg->cra_ctxsize;
+ struct ahash_request *areq = req->data;
+
+ /*
+ * Restore the original request, see ahash_op_unaligned() for what
+ * goes where.
+ *
+ * The "struct ahash_request *req" here is in fact the "req.base"
+ * from the ADJUSTED request from ahash_op_unaligned(), thus as it
+ * is a pointer to self, it is also the ADJUSTED "req" .
+ */
+
+ /* First copy req->result into req->priv.result */
+ ahash_op_unaligned_finish(areq, err);
+
+ /* Complete the ORIGINAL request. */
+ areq->base.complete(&areq->base, err);
}
-static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
+static int ahash_op_unaligned(struct ahash_request *req,
+ int (*op)(struct ahash_request *))
{
- struct ahash_alg *alg = &tfm->__crt_alg->cra_ahash;
- struct ahash_tfm *crt = &tfm->crt_ahash;
+ int err;
- if (alg->digestsize > PAGE_SIZE / 8)
- return -EINVAL;
+ err = ahash_save_req(req, ahash_op_unaligned_done);
+ if (err)
+ return err;
+
+ err = op(req);
+ ahash_op_unaligned_finish(req, err);
+
+ return err;
+}
+
+static int crypto_ahash_op(struct ahash_request *req,
+ int (*op)(struct ahash_request *))
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+ unsigned long alignmask = crypto_ahash_alignmask(tfm);
+
+ if ((unsigned long)req->result & alignmask)
+ return ahash_op_unaligned(req, op);
+
+ return op(req);
+}
+
+int crypto_ahash_final(struct ahash_request *req)
+{
+ return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_final);
+
+int crypto_ahash_finup(struct ahash_request *req)
+{
+ return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_finup);
+
+int crypto_ahash_digest(struct ahash_request *req)
+{
+ return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_digest);
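/*
 * Illustrative sketch of a caller of the ahash front end above (kernel
 * context; the "sha256" name is an assumption, and asynchronous
 * completion handling is elided -- crypto_ahash_digest() may return
 * -EINPROGRESS or -EBUSY for async implementations).
 */
static int example_digest(struct scatterlist *sg, unsigned int len, u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	int err;

	tfm = crypto_alloc_ahash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		crypto_free_ahash(tfm);
		return -ENOMEM;
	}

	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL);
	ahash_request_set_crypt(req, sg, out, len);

	err = crypto_ahash_digest(req);

	ahash_request_free(req);
	crypto_free_ahash(tfm);
	return err;
}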
+
+static void ahash_def_finup_finish2(struct ahash_request *req, int err)
+{
+ struct ahash_request_priv *priv = req->priv;
+
+ if (err == -EINPROGRESS)
+ return;
+
+ if (!err)
+ memcpy(priv->result, req->result,
+ crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
+
+ ahash_restore_req(req);
+}
+
+static void ahash_def_finup_done2(struct crypto_async_request *req, int err)
+{
+ struct ahash_request *areq = req->data;
+
+ ahash_def_finup_finish2(areq, err);
+
+ areq->base.complete(&areq->base, err);
+}
+
+static int ahash_def_finup_finish1(struct ahash_request *req, int err)
+{
+ if (err)
+ goto out;
+
+ req->base.complete = ahash_def_finup_done2;
+ req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+ err = crypto_ahash_reqtfm(req)->final(req);
+
+out:
+ ahash_def_finup_finish2(req, err);
+ return err;
+}
+
+static void ahash_def_finup_done1(struct crypto_async_request *req, int err)
+{
+ struct ahash_request *areq = req->data;
- crt->init = alg->init;
- crt->update = alg->update;
- crt->final = alg->final;
- crt->digest = alg->digest;
- crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey;
- crt->digestsize = alg->digestsize;
+ err = ahash_def_finup_finish1(areq, err);
+ areq->base.complete(&areq->base, err);
+}
+
+static int ahash_def_finup(struct ahash_request *req)
+{
+ struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+ int err;
+
+ err = ahash_save_req(req, ahash_def_finup_done1);
+ if (err)
+ return err;
+
+ err = tfm->update(req);
+ return ahash_def_finup_finish1(req, err);
+}
+
+static int ahash_no_export(struct ahash_request *req, void *out)
+{
+ return -ENOSYS;
+}
+
+static int ahash_no_import(struct ahash_request *req, const void *in)
+{
+ return -ENOSYS;
+}
+
+static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
+{
+ struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
+ struct ahash_alg *alg = crypto_ahash_alg(hash);
+
+ hash->setkey = ahash_nosetkey;
+ hash->export = ahash_no_export;
+ hash->import = ahash_no_import;
+
+ if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
+ return crypto_init_shash_ops_async(tfm);
+
+ hash->init = alg->init;
+ hash->update = alg->update;
+ hash->final = alg->final;
+ hash->finup = alg->finup ?: ahash_def_finup;
+ hash->digest = alg->digest;
+
+ if (alg->setkey)
+ hash->setkey = alg->setkey;
+ if (alg->export)
+ hash->export = alg->export;
+ if (alg->import)
+ hash->import = alg->import;
+
+ return 0;
+}
+
+static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
+{
+ if (alg->cra_type == &crypto_ahash_type)
+ return alg->cra_ctxsize;
+
+ return sizeof(struct crypto_shash *);
+}
+
+#ifdef CONFIG_NET
+static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_hash rhash;
+
+ strncpy(rhash.type, "ahash", sizeof(rhash.type));
+
+ rhash.blocksize = alg->cra_blocksize;
+ rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;
+
+ if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
+ sizeof(struct crypto_report_hash), &rhash))
+ goto nla_put_failure;
return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
}
+#else
+static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
@@ -214,17 +507,102 @@ static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
"yes" : "no");
seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
- seq_printf(m, "digestsize : %u\n", alg->cra_ahash.digestsize);
+ seq_printf(m, "digestsize : %u\n",
+ __crypto_hash_alg_common(alg)->digestsize);
}
const struct crypto_type crypto_ahash_type = {
- .ctxsize = crypto_ahash_ctxsize,
- .init = crypto_init_ahash_ops,
+ .extsize = crypto_ahash_extsize,
+ .init_tfm = crypto_ahash_init_tfm,
#ifdef CONFIG_PROC_FS
.show = crypto_ahash_show,
#endif
+ .report = crypto_ahash_report,
+ .maskclear = ~CRYPTO_ALG_TYPE_MASK,
+ .maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
+ .type = CRYPTO_ALG_TYPE_AHASH,
+ .tfmsize = offsetof(struct crypto_ahash, base),
};
EXPORT_SYMBOL_GPL(crypto_ahash_type);
+struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
+ u32 mask)
+{
+ return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_ahash);
+
+static int ahash_prepare_alg(struct ahash_alg *alg)
+{
+ struct crypto_alg *base = &alg->halg.base;
+
+ if (alg->halg.digestsize > PAGE_SIZE / 8 ||
+ alg->halg.statesize > PAGE_SIZE / 8)
+ return -EINVAL;
+
+ base->cra_type = &crypto_ahash_type;
+ base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
+ base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;
+
+ return 0;
+}
+
+int crypto_register_ahash(struct ahash_alg *alg)
+{
+ struct crypto_alg *base = &alg->halg.base;
+ int err;
+
+ err = ahash_prepare_alg(alg);
+ if (err)
+ return err;
+
+ return crypto_register_alg(base);
+}
+EXPORT_SYMBOL_GPL(crypto_register_ahash);
+
+int crypto_unregister_ahash(struct ahash_alg *alg)
+{
+ return crypto_unregister_alg(&alg->halg.base);
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_ahash);
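/*
 * Illustrative sketch of what a provider registers through the helpers
 * above; the example_* names, sizes and no-op bodies are placeholders.
 * ahash_prepare_alg() fills in cra_type and the AHASH type flag, and
 * unset finup/export/import fall back to the defaults installed in
 * crypto_ahash_init_tfm().
 */
static int example_init(struct ahash_request *req)   { return 0; }
static int example_update(struct ahash_request *req) { return 0; }
static int example_final(struct ahash_request *req)
{
	memset(req->result, 0, 32);
	return 0;
}

static struct ahash_alg example_alg = {
	.init	= example_init,
	.update	= example_update,
	.final	= example_final,
	.halg	= {
		.digestsize	= 32,
		.statesize	= 32,
		.base		= {
			.cra_name	 = "example",
			.cra_driver_name = "example-generic",
			.cra_priority	 = 100,
			.cra_flags	 = CRYPTO_ALG_ASYNC,
			.cra_blocksize	 = 64,
			.cra_module	 = THIS_MODULE,
		},
	},
};

/* crypto_register_ahash(&example_alg) in module init,
 * crypto_unregister_ahash(&example_alg) on exit. */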
+
+int ahash_register_instance(struct crypto_template *tmpl,
+ struct ahash_instance *inst)
+{
+ int err;
+
+ err = ahash_prepare_alg(&inst->alg);
+ if (err)
+ return err;
+
+ return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
+}
+EXPORT_SYMBOL_GPL(ahash_register_instance);
+
+void ahash_free_instance(struct crypto_instance *inst)
+{
+ crypto_drop_spawn(crypto_instance_ctx(inst));
+ kfree(ahash_instance(inst));
+}
+EXPORT_SYMBOL_GPL(ahash_free_instance);
+
+int crypto_init_ahash_spawn(struct crypto_ahash_spawn *spawn,
+ struct hash_alg_common *alg,
+ struct crypto_instance *inst)
+{
+ return crypto_init_spawn2(&spawn->base, &alg->base, inst,
+ &crypto_ahash_type);
+}
+EXPORT_SYMBOL_GPL(crypto_init_ahash_spawn);
+
+struct hash_alg_common *ahash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
+{
+ struct crypto_alg *alg;
+
+ alg = crypto_attr_alg2(rta, &crypto_ahash_type, type, mask);
+ return IS_ERR(alg) ? ERR_CAST(alg) : __crypto_hash_alg_common(alg);
+}
+EXPORT_SYMBOL_GPL(ahash_attr_alg);
+
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");
diff --git a/crypto/algapi.c b/crypto/algapi.c
index 56c62e2858d..7a1ae87f168 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -17,30 +17,13 @@
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
+#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"
-static void crypto_remove_final(struct list_head *list);
-
static LIST_HEAD(crypto_template_list);
-void crypto_larval_error(const char *name, u32 type, u32 mask)
-{
- struct crypto_alg *alg;
-
- alg = crypto_alg_lookup(name, type, mask);
-
- if (alg) {
- if (crypto_is_larval(alg)) {
- struct crypto_larval *larval = (void *)alg;
- complete_all(&larval->completion);
- }
- crypto_mod_put(alg);
- }
-}
-EXPORT_SYMBOL_GPL(crypto_larval_error);
-
static inline int crypto_set_driver_name(struct crypto_alg *alg)
{
static const char suffix[] = "-generic";
@@ -81,16 +64,35 @@ static void crypto_destroy_instance(struct crypto_alg *alg)
crypto_tmpl_put(tmpl);
}
+static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
+ struct list_head *stack,
+ struct list_head *top,
+ struct list_head *secondary_spawns)
+{
+ struct crypto_spawn *spawn, *n;
+
+ if (list_empty(stack))
+ return NULL;
+
+ spawn = list_first_entry(stack, struct crypto_spawn, list);
+ n = list_entry(spawn->list.next, struct crypto_spawn, list);
+
+ if (spawn->alg && &n->list != stack && !n->alg)
+ n->alg = (n->list.next == stack) ? alg :
+ &list_entry(n->list.next, struct crypto_spawn,
+ list)->inst->alg;
+
+ list_move(&spawn->list, secondary_spawns);
+
+ return &n->list == stack ? top : &n->inst->alg.cra_users;
+}
+
static void crypto_remove_spawn(struct crypto_spawn *spawn,
- struct list_head *list,
- struct list_head *secondary_spawns)
+ struct list_head *list)
{
struct crypto_instance *inst = spawn->inst;
struct crypto_template *tmpl = inst->tmpl;
- list_del_init(&spawn->list);
- spawn->alg = NULL;
-
if (crypto_is_dead(&inst->alg))
return;
@@ -106,27 +108,57 @@ static void crypto_remove_spawn(struct crypto_spawn *spawn,
hlist_del(&inst->list);
inst->alg.cra_destroy = crypto_destroy_instance;
- list_splice(&inst->alg.cra_users, secondary_spawns);
+ BUG_ON(!list_empty(&inst->alg.cra_users));
}
-static void crypto_remove_spawns(struct list_head *spawns,
- struct list_head *list, u32 new_type)
+void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
+ struct crypto_alg *nalg)
{
+ u32 new_type = (nalg ?: alg)->cra_flags;
struct crypto_spawn *spawn, *n;
LIST_HEAD(secondary_spawns);
+ struct list_head *spawns;
+ LIST_HEAD(stack);
+ LIST_HEAD(top);
+ spawns = &alg->cra_users;
list_for_each_entry_safe(spawn, n, spawns, list) {
if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
continue;
- crypto_remove_spawn(spawn, list, &secondary_spawns);
+ list_move(&spawn->list, &top);
}
- while (!list_empty(&secondary_spawns)) {
- list_for_each_entry_safe(spawn, n, &secondary_spawns, list)
- crypto_remove_spawn(spawn, list, &secondary_spawns);
+ spawns = &top;
+ do {
+ while (!list_empty(spawns)) {
+ struct crypto_instance *inst;
+
+ spawn = list_first_entry(spawns, struct crypto_spawn,
+ list);
+ inst = spawn->inst;
+
+ BUG_ON(&inst->alg == alg);
+
+ list_move(&spawn->list, &stack);
+
+ if (&inst->alg == nalg)
+ break;
+
+ spawn->alg = NULL;
+ spawns = &inst->alg.cra_users;
+ }
+ } while ((spawns = crypto_more_spawns(alg, &stack, &top,
+ &secondary_spawns)));
+
+ list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
+ if (spawn->alg)
+ list_move(&spawn->list, &spawn->alg->cra_users);
+ else
+ crypto_remove_spawn(spawn, list);
}
}
+EXPORT_SYMBOL_GPL(crypto_remove_spawns);
static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
@@ -181,7 +213,7 @@ static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
list_add(&alg->cra_list, &crypto_alg_list);
list_add(&larval->alg.cra_list, &crypto_alg_list);
-out:
+out:
return larval;
free_larval:
@@ -247,7 +279,6 @@ found:
continue;
larval->adult = alg;
- complete_all(&larval->completion);
continue;
}
@@ -258,7 +289,7 @@ found:
q->cra_priority > alg->cra_priority)
continue;
- crypto_remove_spawns(&q->cra_users, &list, alg->cra_flags);
+ crypto_remove_spawns(q, &list, alg);
}
complete:
@@ -271,7 +302,7 @@ unlock:
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);
-static void crypto_remove_final(struct list_head *list)
+void crypto_remove_final(struct list_head *list)
{
struct crypto_alg *alg;
struct crypto_alg *n;
@@ -281,6 +312,7 @@ static void crypto_remove_final(struct list_head *list)
crypto_alg_put(alg);
}
}
+EXPORT_SYMBOL_GPL(crypto_remove_final);
static void crypto_wait_for_test(struct crypto_larval *larval)
{
@@ -330,7 +362,7 @@ static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg);
list_del_init(&alg->cra_list);
- crypto_remove_spawns(&alg->cra_users, list, alg->cra_flags);
+ crypto_remove_spawns(alg, list, NULL);
return 0;
}
@@ -339,7 +371,7 @@ int crypto_unregister_alg(struct crypto_alg *alg)
{
int ret;
LIST_HEAD(list);
-
+
down_write(&crypto_alg_sem);
ret = crypto_remove_alg(alg, &list);
up_write(&crypto_alg_sem);
@@ -356,6 +388,41 @@ int crypto_unregister_alg(struct crypto_alg *alg)
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);
+int crypto_register_algs(struct crypto_alg *algs, int count)
+{
+ int i, ret;
+
+ for (i = 0; i < count; i++) {
+ ret = crypto_register_alg(&algs[i]);
+ if (ret)
+ goto err;
+ }
+
+ return 0;
+
+err:
+ for (--i; i >= 0; --i)
+ crypto_unregister_alg(&algs[i]);
+
+ return ret;
+}
+EXPORT_SYMBOL_GPL(crypto_register_algs);
+
+int crypto_unregister_algs(struct crypto_alg *algs, int count)
+{
+ int i, ret;
+
+ for (i = 0; i < count; i++) {
+ ret = crypto_unregister_alg(&algs[i]);
+ if (ret)
+ pr_err("Failed to unregister %s %s: %d\n",
+ algs[i].cra_driver_name, algs[i].cra_name, ret);
+ }
+
+ return 0;
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_algs);
+
int crypto_register_template(struct crypto_template *tmpl)
{
struct crypto_template *q;
@@ -380,7 +447,7 @@ EXPORT_SYMBOL_GPL(crypto_register_template);
void crypto_unregister_template(struct crypto_template *tmpl)
{
struct crypto_instance *inst;
- struct hlist_node *p, *n;
+ struct hlist_node *n;
struct hlist_head *list;
LIST_HEAD(users);
@@ -390,7 +457,7 @@ void crypto_unregister_template(struct crypto_template *tmpl)
list_del_init(&tmpl->list);
list = &tmpl->instances;
- hlist_for_each_entry(inst, p, list, list) {
+ hlist_for_each_entry(inst, list, list) {
int err = crypto_remove_alg(&inst->alg, &users);
BUG_ON(err);
}
@@ -399,7 +466,7 @@ void crypto_unregister_template(struct crypto_template *tmpl)
up_write(&crypto_alg_sem);
- hlist_for_each_entry_safe(inst, p, n, list, list) {
+ hlist_for_each_entry_safe(inst, n, list, list) {
BUG_ON(atomic_read(&inst->alg.cra_refcnt) != 1);
tmpl->free(inst);
}
@@ -428,7 +495,8 @@ static struct crypto_template *__crypto_lookup_template(const char *name)
struct crypto_template *crypto_lookup_template(const char *name)
{
- return try_then_request_module(__crypto_lookup_template(name), name);
+ return try_then_request_module(__crypto_lookup_template(name), "%s",
+ name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
@@ -443,6 +511,7 @@ int crypto_register_instance(struct crypto_template *tmpl,
goto err;
inst->alg.cra_module = tmpl->module;
+ inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
down_write(&crypto_alg_sem);
@@ -468,6 +537,35 @@ err:
}
EXPORT_SYMBOL_GPL(crypto_register_instance);
+int crypto_unregister_instance(struct crypto_alg *alg)
+{
+ int err;
+ struct crypto_instance *inst = (void *)alg;
+ struct crypto_template *tmpl = inst->tmpl;
+ LIST_HEAD(users);
+
+ if (!(alg->cra_flags & CRYPTO_ALG_INSTANCE))
+ return -EINVAL;
+
+ BUG_ON(atomic_read(&alg->cra_refcnt) != 1);
+
+ down_write(&crypto_alg_sem);
+
+ hlist_del_init(&inst->list);
+ err = crypto_remove_alg(alg, &users);
+
+ up_write(&crypto_alg_sem);
+
+ if (err)
+ return err;
+
+ tmpl->free(inst);
+ crypto_remove_final(&users);
+
+ return 0;
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_instance);
+
int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
struct crypto_instance *inst, u32 mask)
{
@@ -488,20 +586,38 @@ int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);
+int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
+ struct crypto_instance *inst,
+ const struct crypto_type *frontend)
+{
+ int err = -EINVAL;
+
+ if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
+ goto out;
+
+ spawn->frontend = frontend;
+ err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);
+
+out:
+ return err;
+}
+EXPORT_SYMBOL_GPL(crypto_init_spawn2);
+
void crypto_drop_spawn(struct crypto_spawn *spawn)
{
+ if (!spawn->alg)
+ return;
+
down_write(&crypto_alg_sem);
list_del(&spawn->list);
up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);
-struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
- u32 mask)
+static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
struct crypto_alg *alg;
struct crypto_alg *alg2;
- struct crypto_tfm *tfm;
down_read(&crypto_alg_sem);
alg = spawn->alg;
@@ -516,6 +632,19 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
return ERR_PTR(-EAGAIN);
}
+ return alg;
+}
+
+struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
+ u32 mask)
+{
+ struct crypto_alg *alg;
+ struct crypto_tfm *tfm;
+
+ alg = crypto_spawn_alg(spawn);
+ if (IS_ERR(alg))
+ return ERR_CAST(alg);
+
tfm = ERR_PTR(-EINVAL);
if (unlikely((alg->cra_flags ^ type) & mask))
goto out_put_alg;
@@ -532,6 +661,27 @@ out_put_alg:
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
+void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
+{
+ struct crypto_alg *alg;
+ struct crypto_tfm *tfm;
+
+ alg = crypto_spawn_alg(spawn);
+ if (IS_ERR(alg))
+ return ERR_CAST(alg);
+
+ tfm = crypto_create_tfm(alg, spawn->frontend);
+ if (IS_ERR(tfm))
+ goto out_put_alg;
+
+ return tfm;
+
+out_put_alg:
+ crypto_mod_put(alg);
+ return tfm;
+}
+EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
+
int crypto_register_notifier(struct notifier_block *nb)
{
return blocking_notifier_chain_register(&crypto_chain, nb);
@@ -595,19 +745,19 @@ const char *crypto_attr_alg_name(struct rtattr *rta)
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
-struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
+struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
+ const struct crypto_type *frontend,
+ u32 type, u32 mask)
{
const char *name;
- int err;
name = crypto_attr_alg_name(rta);
- err = PTR_ERR(name);
if (IS_ERR(name))
- return ERR_PTR(err);
+ return ERR_CAST(name);
- return crypto_alg_mod_lookup(name, type, mask);
+ return crypto_find_alg(name, frontend, type, mask);
}
-EXPORT_SYMBOL_GPL(crypto_attr_alg);
+EXPORT_SYMBOL_GPL(crypto_attr_alg2);
int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
@@ -627,17 +777,20 @@ int crypto_attr_u32(struct rtattr *rta, u32 *num)
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);
-struct crypto_instance *crypto_alloc_instance(const char *name,
- struct crypto_alg *alg)
+void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
+ unsigned int head)
{
struct crypto_instance *inst;
- struct crypto_spawn *spawn;
+ char *p;
int err;
- inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
- if (!inst)
+ p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
+ GFP_KERNEL);
+ if (!p)
return ERR_PTR(-ENOMEM);
+ inst = (void *)(p + head);
+
err = -ENAMETOOLONG;
if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
@@ -647,6 +800,25 @@ struct crypto_instance *crypto_alloc_instance(const char *name,
name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
goto err_free_inst;
+ return p;
+
+err_free_inst:
+ kfree(p);
+ return ERR_PTR(err);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_instance2);
+
+struct crypto_instance *crypto_alloc_instance(const char *name,
+ struct crypto_alg *alg)
+{
+ struct crypto_instance *inst;
+ struct crypto_spawn *spawn;
+ int err;
+
+ inst = crypto_alloc_instance2(name, alg, 0);
+ if (IS_ERR(inst))
+ goto out;
+
spawn = crypto_instance_ctx(inst);
err = crypto_init_spawn(spawn, alg, inst,
CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
@@ -658,7 +830,10 @@ struct crypto_instance *crypto_alloc_instance(const char *name,
err_free_inst:
kfree(inst);
- return ERR_PTR(err);
+ inst = ERR_PTR(err);
+
+out:
+ return inst;
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance);
@@ -692,7 +867,7 @@ out:
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);
-struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
+void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset)
{
struct list_head *request;
@@ -707,7 +882,14 @@ struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
request = queue->list.next;
list_del(request);
- return list_entry(request, struct crypto_async_request, list);
+ return (char *)list_entry(request, struct crypto_async_request, list) -
+ offset;
+}
+EXPORT_SYMBOL_GPL(__crypto_dequeue_request);
+
+struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
+{
+ return __crypto_dequeue_request(queue, 0);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
diff --git a/crypto/algboss.c b/crypto/algboss.c
index 4601e4267c8..76fc0b23fc6 100644
--- a/crypto/algboss.c
+++ b/crypto/algboss.c
@@ -10,7 +10,8 @@
*
*/
-#include <linux/crypto.h>
+#include <crypto/internal/aead.h>
+#include <linux/completion.h>
#include <linux/ctype.h>
#include <linux/err.h>
#include <linux/init.h>
@@ -19,6 +20,7 @@
#include <linux/notifier.h>
#include <linux/rtnetlink.h>
#include <linux/sched.h>
+#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"
@@ -43,9 +45,10 @@ struct cryptomgr_param {
} nu32;
} attrs[CRYPTO_MAX_ATTRS];
- char larval[CRYPTO_MAX_ALG_NAME];
char template[CRYPTO_MAX_ALG_NAME];
+ struct crypto_larval *larval;
+
u32 otype;
u32 omask;
};
@@ -65,9 +68,14 @@ static int cryptomgr_probe(void *data)
tmpl = crypto_lookup_template(param->template);
if (!tmpl)
- goto err;
+ goto out;
do {
+ if (tmpl->create) {
+ err = tmpl->create(tmpl, param->tb);
+ continue;
+ }
+
inst = tmpl->alloc(param->tb);
if (IS_ERR(inst))
err = PTR_ERR(inst);
@@ -77,16 +85,11 @@ static int cryptomgr_probe(void *data)
crypto_tmpl_put(tmpl);
- if (err)
- goto err;
-
out:
+ complete_all(&param->larval->completion);
+ crypto_alg_put(&param->larval->alg);
kfree(param);
module_put_and_exit(0);
-
-err:
- crypto_larval_error(param->larval, param->otype, param->omask);
- goto out;
}
static int cryptomgr_schedule_probe(struct crypto_larval *larval)
@@ -184,14 +187,19 @@ static int cryptomgr_schedule_probe(struct crypto_larval *larval)
param->otype = larval->alg.cra_flags;
param->omask = larval->mask;
- memcpy(param->larval, larval->alg.cra_name, CRYPTO_MAX_ALG_NAME);
+ crypto_alg_get(&larval->alg);
+ param->larval = larval;
thread = kthread_run(cryptomgr_probe, param, "cryptomgr_probe");
if (IS_ERR(thread))
- goto err_free_param;
+ goto err_put_larval;
+
+ wait_for_completion_interruptible(&larval->completion);
return NOTIFY_STOP;
+err_put_larval:
+ crypto_alg_put(&larval->alg);
err_free_param:
kfree(param);
err_put_module:
@@ -206,8 +214,11 @@ static int cryptomgr_test(void *data)
u32 type = param->type;
int err = 0;
- if (!((type ^ CRYPTO_ALG_TYPE_BLKCIPHER) &
- CRYPTO_ALG_TYPE_BLKCIPHER_MASK) && !(type & CRYPTO_ALG_GENIV))
+#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
+ goto skiptest;
+#endif
+
+ if (type & CRYPTO_ALG_TESTED)
goto skiptest;
err = alg_test(param->driver, param->alg, type, CRYPTO_ALG_TESTED);
@@ -223,6 +234,7 @@ static int cryptomgr_schedule_test(struct crypto_alg *alg)
{
struct task_struct *thread;
struct crypto_test_param *param;
+ u32 type;
if (!try_module_get(THIS_MODULE))
goto err;
@@ -233,7 +245,19 @@ static int cryptomgr_schedule_test(struct crypto_alg *alg)
memcpy(param->driver, alg->cra_driver_name, sizeof(param->driver));
memcpy(param->alg, alg->cra_name, sizeof(param->alg));
- param->type = alg->cra_flags;
+ type = alg->cra_flags;
+
+ /* This piece of crap needs to disappear into per-type test hooks. */
+ if ((!((type ^ CRYPTO_ALG_TYPE_BLKCIPHER) &
+ CRYPTO_ALG_TYPE_BLKCIPHER_MASK) && !(type & CRYPTO_ALG_GENIV) &&
+ ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+ CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
+ alg->cra_ablkcipher.ivsize)) ||
+ (!((type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK) &&
+ alg->cra_type == &crypto_nivaead_type && alg->cra_aead.ivsize))
+ type |= CRYPTO_ALG_TESTED;
+
+ param->type = type;
thread = kthread_run(cryptomgr_test, param, "cryptomgr_test");
if (IS_ERR(thread))
@@ -268,29 +292,13 @@ static struct notifier_block cryptomgr_notifier = {
static int __init cryptomgr_init(void)
{
- int err;
-
- err = testmgr_init();
- if (err)
- return err;
-
- err = crypto_register_notifier(&cryptomgr_notifier);
- if (err)
- goto free_testmgr;
-
- return 0;
-
-free_testmgr:
- testmgr_exit();
- return err;
+ return crypto_register_notifier(&cryptomgr_notifier);
}
static void __exit cryptomgr_exit(void)
{
int err = crypto_unregister_notifier(&cryptomgr_notifier);
BUG_ON(err);
-
- testmgr_exit();
}
subsys_initcall(cryptomgr_init);
diff --git a/crypto/algif_hash.c b/crypto/algif_hash.c
new file mode 100644
index 00000000000..850246206b1
--- /dev/null
+++ b/crypto/algif_hash.c
@@ -0,0 +1,324 @@
+/*
+ * algif_hash: User-space interface for hash algorithms
+ *
+ * This file provides the user-space API for hash algorithms.
+ *
+ * Copyright (c) 2010 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/hash.h>
+#include <crypto/if_alg.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/mm.h>
+#include <linux/module.h>
+#include <linux/net.h>
+#include <net/sock.h>
+
+struct hash_ctx {
+ struct af_alg_sgl sgl;
+
+ u8 *result;
+
+ struct af_alg_completion completion;
+
+ unsigned int len;
+ bool more;
+
+ struct ahash_request req;
+};
+
+static int hash_sendmsg(struct kiocb *unused, struct socket *sock,
+ struct msghdr *msg, size_t ignored)
+{
+ int limit = ALG_MAX_PAGES * PAGE_SIZE;
+ struct sock *sk = sock->sk;
+ struct alg_sock *ask = alg_sk(sk);
+ struct hash_ctx *ctx = ask->private;
+ unsigned long iovlen;
+ struct iovec *iov;
+ long copied = 0;
+ int err;
+
+ if (limit > sk->sk_sndbuf)
+ limit = sk->sk_sndbuf;
+
+ lock_sock(sk);
+ if (!ctx->more) {
+ err = crypto_ahash_init(&ctx->req);
+ if (err)
+ goto unlock;
+ }
+
+ ctx->more = 0;
+
+ for (iov = msg->msg_iov, iovlen = msg->msg_iovlen; iovlen > 0;
+ iovlen--, iov++) {
+ unsigned long seglen = iov->iov_len;
+ char __user *from = iov->iov_base;
+
+ while (seglen) {
+ int len = min_t(unsigned long, seglen, limit);
+ int newlen;
+
+ newlen = af_alg_make_sg(&ctx->sgl, from, len, 0);
+ if (newlen < 0) {
+ err = copied ? 0 : newlen;
+ goto unlock;
+ }
+
+ ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, NULL,
+ newlen);
+
+ err = af_alg_wait_for_completion(
+ crypto_ahash_update(&ctx->req),
+ &ctx->completion);
+
+ af_alg_free_sg(&ctx->sgl);
+
+ if (err)
+ goto unlock;
+
+ seglen -= newlen;
+ from += newlen;
+ copied += newlen;
+ }
+ }
+
+ err = 0;
+
+ ctx->more = msg->msg_flags & MSG_MORE;
+ if (!ctx->more) {
+ ahash_request_set_crypt(&ctx->req, NULL, ctx->result, 0);
+ err = af_alg_wait_for_completion(crypto_ahash_final(&ctx->req),
+ &ctx->completion);
+ }
+
+unlock:
+ release_sock(sk);
+
+ return err ?: copied;
+}
+
+static ssize_t hash_sendpage(struct socket *sock, struct page *page,
+ int offset, size_t size, int flags)
+{
+ struct sock *sk = sock->sk;
+ struct alg_sock *ask = alg_sk(sk);
+ struct hash_ctx *ctx = ask->private;
+ int err;
+
+ if (flags & MSG_SENDPAGE_NOTLAST)
+ flags |= MSG_MORE;
+
+ lock_sock(sk);
+ sg_init_table(ctx->sgl.sg, 1);
+ sg_set_page(ctx->sgl.sg, page, size, offset);
+
+ ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, ctx->result, size);
+
+ if (!(flags & MSG_MORE)) {
+ if (ctx->more)
+ err = crypto_ahash_finup(&ctx->req);
+ else
+ err = crypto_ahash_digest(&ctx->req);
+ } else {
+ if (!ctx->more) {
+ err = crypto_ahash_init(&ctx->req);
+ if (err)
+ goto unlock;
+ }
+
+ err = crypto_ahash_update(&ctx->req);
+ }
+
+ err = af_alg_wait_for_completion(err, &ctx->completion);
+ if (err)
+ goto unlock;
+
+ ctx->more = flags & MSG_MORE;
+
+unlock:
+ release_sock(sk);
+
+ return err ?: size;
+}
+
+static int hash_recvmsg(struct kiocb *unused, struct socket *sock,
+ struct msghdr *msg, size_t len, int flags)
+{
+ struct sock *sk = sock->sk;
+ struct alg_sock *ask = alg_sk(sk);
+ struct hash_ctx *ctx = ask->private;
+ unsigned ds = crypto_ahash_digestsize(crypto_ahash_reqtfm(&ctx->req));
+ int err;
+
+ if (len > ds)
+ len = ds;
+ else if (len < ds)
+ msg->msg_flags |= MSG_TRUNC;
+
+ lock_sock(sk);
+ if (ctx->more) {
+ ctx->more = 0;
+ ahash_request_set_crypt(&ctx->req, NULL, ctx->result, 0);
+ err = af_alg_wait_for_completion(crypto_ahash_final(&ctx->req),
+ &ctx->completion);
+ if (err)
+ goto unlock;
+ }
+
+ err = memcpy_toiovec(msg->msg_iov, ctx->result, len);
+
+unlock:
+ release_sock(sk);
+
+ return err ?: len;
+}
+
+static int hash_accept(struct socket *sock, struct socket *newsock, int flags)
+{
+ struct sock *sk = sock->sk;
+ struct alg_sock *ask = alg_sk(sk);
+ struct hash_ctx *ctx = ask->private;
+ struct ahash_request *req = &ctx->req;
+ char state[crypto_ahash_statesize(crypto_ahash_reqtfm(req))];
+ struct sock *sk2;
+ struct alg_sock *ask2;
+ struct hash_ctx *ctx2;
+ int err;
+
+ err = crypto_ahash_export(req, state);
+ if (err)
+ return err;
+
+ err = af_alg_accept(ask->parent, newsock);
+ if (err)
+ return err;
+
+ sk2 = newsock->sk;
+ ask2 = alg_sk(sk2);
+ ctx2 = ask2->private;
+ ctx2->more = 1;
+
+ err = crypto_ahash_import(&ctx2->req, state);
+ if (err) {
+ sock_orphan(sk2);
+ sock_put(sk2);
+ }
+
+ return err;
+}
+
+static struct proto_ops algif_hash_ops = {
+ .family = PF_ALG,
+
+ .connect = sock_no_connect,
+ .socketpair = sock_no_socketpair,
+ .getname = sock_no_getname,
+ .ioctl = sock_no_ioctl,
+ .listen = sock_no_listen,
+ .shutdown = sock_no_shutdown,
+ .getsockopt = sock_no_getsockopt,
+ .mmap = sock_no_mmap,
+ .bind = sock_no_bind,
+ .setsockopt = sock_no_setsockopt,
+ .poll = sock_no_poll,
+
+ .release = af_alg_release,
+ .sendmsg = hash_sendmsg,
+ .sendpage = hash_sendpage,
+ .recvmsg = hash_recvmsg,
+ .accept = hash_accept,
+};
+
+static void *hash_bind(const char *name, u32 type, u32 mask)
+{
+ return crypto_alloc_ahash(name, type, mask);
+}
+
+static void hash_release(void *private)
+{
+ crypto_free_ahash(private);
+}
+
+static int hash_setkey(void *private, const u8 *key, unsigned int keylen)
+{
+ return crypto_ahash_setkey(private, key, keylen);
+}
+
+static void hash_sock_destruct(struct sock *sk)
+{
+ struct alg_sock *ask = alg_sk(sk);
+ struct hash_ctx *ctx = ask->private;
+
+ sock_kfree_s(sk, ctx->result,
+ crypto_ahash_digestsize(crypto_ahash_reqtfm(&ctx->req)));
+ sock_kfree_s(sk, ctx, ctx->len);
+ af_alg_release_parent(sk);
+}
+
+static int hash_accept_parent(void *private, struct sock *sk)
+{
+ struct hash_ctx *ctx;
+ struct alg_sock *ask = alg_sk(sk);
+ unsigned len = sizeof(*ctx) + crypto_ahash_reqsize(private);
+ unsigned ds = crypto_ahash_digestsize(private);
+
+ ctx = sock_kmalloc(sk, len, GFP_KERNEL);
+ if (!ctx)
+ return -ENOMEM;
+
+ ctx->result = sock_kmalloc(sk, ds, GFP_KERNEL);
+ if (!ctx->result) {
+ sock_kfree_s(sk, ctx, len);
+ return -ENOMEM;
+ }
+
+ memset(ctx->result, 0, ds);
+
+ ctx->len = len;
+ ctx->more = 0;
+ af_alg_init_completion(&ctx->completion);
+
+ ask->private = ctx;
+
+ ahash_request_set_tfm(&ctx->req, private);
+ ahash_request_set_callback(&ctx->req, CRYPTO_TFM_REQ_MAY_BACKLOG,
+ af_alg_complete, &ctx->completion);
+
+ sk->sk_destruct = hash_sock_destruct;
+
+ return 0;
+}
+
+static const struct af_alg_type algif_type_hash = {
+ .bind = hash_bind,
+ .release = hash_release,
+ .setkey = hash_setkey,
+ .accept = hash_accept_parent,
+ .ops = &algif_hash_ops,
+ .name = "hash",
+ .owner = THIS_MODULE
+};
+
+static int __init algif_hash_init(void)
+{
+ return af_alg_register_type(&algif_type_hash);
+}
+
+static void __exit algif_hash_exit(void)
+{
+ int err = af_alg_unregister_type(&algif_type_hash);
+ BUG_ON(err);
+}
+
+module_init(algif_hash_init);
+module_exit(algif_hash_exit);
+MODULE_LICENSE("GPL");
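
A minimal user-space sketch (not part of the patch) of how the "hash" socket added above might be driven: bind selects the algorithm, accept() reaches hash_accept_parent(), send() feeds hash_sendmsg() and read() pulls the digest via hash_recvmsg(). The "sha1" name, the AF_ALG/SOL_ALG fallback values and the missing error handling are assumptions for illustration only.

#include <stdio.h>
#include <unistd.h>
#include <sys/socket.h>
#include <linux/if_alg.h>

#ifndef AF_ALG
#define AF_ALG 38
#endif
#ifndef SOL_ALG
#define SOL_ALG 279
#endif

int main(void)
{
	struct sockaddr_alg sa = {
		.salg_family = AF_ALG,
		.salg_type   = "hash",
		.salg_name   = "sha1",
	};
	unsigned char digest[20];
	int tfmfd, opfd, i;

	tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);
	bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa));
	opfd = accept(tfmfd, NULL, 0);		/* hash_accept_parent() runs here */

	send(opfd, "abc", 3, 0);		/* hash_sendmsg(): update + final */
	read(opfd, digest, sizeof(digest));	/* hash_recvmsg(): copy out digest */

	for (i = 0; i < (int)sizeof(digest); i++)
		printf("%02x", digest[i]);
	printf("\n");

	close(opfd);
	close(tfmfd);
	return 0;
}

Passing MSG_MORE to send() would leave ctx->more set so that further writes keep updating the same hash state before the final read().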
diff --git a/crypto/algif_skcipher.c b/crypto/algif_skcipher.c
new file mode 100644
index 00000000000..a19c027b29b
--- /dev/null
+++ b/crypto/algif_skcipher.c
@@ -0,0 +1,635 @@
+/*
+ * algif_skcipher: User-space interface for skcipher algorithms
+ *
+ * This file provides the user-space API for symmetric key ciphers.
+ *
+ * Copyright (c) 2010 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/scatterwalk.h>
+#include <crypto/skcipher.h>
+#include <crypto/if_alg.h>
+#include <linux/init.h>
+#include <linux/list.h>
+#include <linux/kernel.h>
+#include <linux/mm.h>
+#include <linux/module.h>
+#include <linux/net.h>
+#include <net/sock.h>
+
+struct skcipher_sg_list {
+ struct list_head list;
+
+ int cur;
+
+ struct scatterlist sg[0];
+};
+
+struct skcipher_ctx {
+ struct list_head tsgl;
+ struct af_alg_sgl rsgl;
+
+ void *iv;
+
+ struct af_alg_completion completion;
+
+ unsigned used;
+
+ unsigned int len;
+ bool more;
+ bool merge;
+ bool enc;
+
+ struct ablkcipher_request req;
+};
+
+#define MAX_SGL_ENTS ((PAGE_SIZE - sizeof(struct skcipher_sg_list)) / \
+ sizeof(struct scatterlist) - 1)
+
+static inline int skcipher_sndbuf(struct sock *sk)
+{
+ struct alg_sock *ask = alg_sk(sk);
+ struct skcipher_ctx *ctx = ask->private;
+
+ return max_t(int, max_t(int, sk->sk_sndbuf & PAGE_MASK, PAGE_SIZE) -
+ ctx->used, 0);
+}
+
+static inline bool skcipher_writable(struct sock *sk)
+{
+ return PAGE_SIZE <= skcipher_sndbuf(sk);
+}
+
+static int skcipher_alloc_sgl(struct sock *sk)
+{
+ struct alg_sock *ask = alg_sk(sk);
+ struct skcipher_ctx *ctx = ask->private;
+ struct skcipher_sg_list *sgl;
+ struct scatterlist *sg = NULL;
+
+ sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list);
+ if (!list_empty(&ctx->tsgl))
+ sg = sgl->sg;
+
+ if (!sg || sgl->cur >= MAX_SGL_ENTS) {
+ sgl = sock_kmalloc(sk, sizeof(*sgl) +
+ sizeof(sgl->sg[0]) * (MAX_SGL_ENTS + 1),
+ GFP_KERNEL);
+ if (!sgl)
+ return -ENOMEM;
+
+ sg_init_table(sgl->sg, MAX_SGL_ENTS + 1);
+ sgl->cur = 0;
+
+ if (sg)
+ scatterwalk_sg_chain(sg, MAX_SGL_ENTS + 1, sgl->sg);
+
+ list_add_tail(&sgl->list, &ctx->tsgl);
+ }
+
+ return 0;
+}
+
+static void skcipher_pull_sgl(struct sock *sk, int used)
+{
+ struct alg_sock *ask = alg_sk(sk);
+ struct skcipher_ctx *ctx = ask->private;
+ struct skcipher_sg_list *sgl;
+ struct scatterlist *sg;
+ int i;
+
+ while (!list_empty(&ctx->tsgl)) {
+ sgl = list_first_entry(&ctx->tsgl, struct skcipher_sg_list,
+ list);
+ sg = sgl->sg;
+
+ for (i = 0; i < sgl->cur; i++) {
+ int plen = min_t(int, used, sg[i].length);
+
+ if (!sg_page(sg + i))
+ continue;
+
+ sg[i].length -= plen;
+ sg[i].offset += plen;
+
+ used -= plen;
+ ctx->used -= plen;
+
+ if (sg[i].length)
+ return;
+
+ put_page(sg_page(sg + i));
+ sg_assign_page(sg + i, NULL);
+ }
+
+ list_del(&sgl->list);
+ sock_kfree_s(sk, sgl,
+ sizeof(*sgl) + sizeof(sgl->sg[0]) *
+ (MAX_SGL_ENTS + 1));
+ }
+
+ if (!ctx->used)
+ ctx->merge = 0;
+}
+
+static void skcipher_free_sgl(struct sock *sk)
+{
+ struct alg_sock *ask = alg_sk(sk);
+ struct skcipher_ctx *ctx = ask->private;
+
+ skcipher_pull_sgl(sk, ctx->used);
+}
+
+static int skcipher_wait_for_wmem(struct sock *sk, unsigned flags)
+{
+ long timeout;
+ DEFINE_WAIT(wait);
+ int err = -ERESTARTSYS;
+
+ if (flags & MSG_DONTWAIT)
+ return -EAGAIN;
+
+ set_bit(SOCK_ASYNC_NOSPACE, &sk->sk_socket->flags);
+
+ for (;;) {
+ if (signal_pending(current))
+ break;
+ prepare_to_wait(sk_sleep(sk), &wait, TASK_INTERRUPTIBLE);
+ timeout = MAX_SCHEDULE_TIMEOUT;
+ if (sk_wait_event(sk, &timeout, skcipher_writable(sk))) {
+ err = 0;
+ break;
+ }
+ }
+ finish_wait(sk_sleep(sk), &wait);
+
+ return err;
+}
+
+static void skcipher_wmem_wakeup(struct sock *sk)
+{
+ struct socket_wq *wq;
+
+ if (!skcipher_writable(sk))
+ return;
+
+ rcu_read_lock();
+ wq = rcu_dereference(sk->sk_wq);
+ if (wq_has_sleeper(wq))
+ wake_up_interruptible_sync_poll(&wq->wait, POLLIN |
+ POLLRDNORM |
+ POLLRDBAND);
+ sk_wake_async(sk, SOCK_WAKE_WAITD, POLL_IN);
+ rcu_read_unlock();
+}
+
+static int skcipher_wait_for_data(struct sock *sk, unsigned flags)
+{
+ struct alg_sock *ask = alg_sk(sk);
+ struct skcipher_ctx *ctx = ask->private;
+ long timeout;
+ DEFINE_WAIT(wait);
+ int err = -ERESTARTSYS;
+
+ if (flags & MSG_DONTWAIT) {
+ return -EAGAIN;
+ }
+
+ set_bit(SOCK_ASYNC_WAITDATA, &sk->sk_socket->flags);
+
+ for (;;) {
+ if (signal_pending(current))
+ break;
+ prepare_to_wait(sk_sleep(sk), &wait, TASK_INTERRUPTIBLE);
+ timeout = MAX_SCHEDULE_TIMEOUT;
+ if (sk_wait_event(sk, &timeout, ctx->used)) {
+ err = 0;
+ break;
+ }
+ }
+ finish_wait(sk_sleep(sk), &wait);
+
+ clear_bit(SOCK_ASYNC_WAITDATA, &sk->sk_socket->flags);
+
+ return err;
+}
+
+static void skcipher_data_wakeup(struct sock *sk)
+{
+ struct alg_sock *ask = alg_sk(sk);
+ struct skcipher_ctx *ctx = ask->private;
+ struct socket_wq *wq;
+
+ if (!ctx->used)
+ return;
+
+ rcu_read_lock();
+ wq = rcu_dereference(sk->sk_wq);
+ if (wq_has_sleeper(wq))
+ wake_up_interruptible_sync_poll(&wq->wait, POLLOUT |
+ POLLRDNORM |
+ POLLRDBAND);
+ sk_wake_async(sk, SOCK_WAKE_SPACE, POLL_OUT);
+ rcu_read_unlock();
+}
+
+static int skcipher_sendmsg(struct kiocb *unused, struct socket *sock,
+ struct msghdr *msg, size_t size)
+{
+ struct sock *sk = sock->sk;
+ struct alg_sock *ask = alg_sk(sk);
+ struct skcipher_ctx *ctx = ask->private;
+ struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(&ctx->req);
+ unsigned ivsize = crypto_ablkcipher_ivsize(tfm);
+ struct skcipher_sg_list *sgl;
+ struct af_alg_control con = {};
+ long copied = 0;
+ bool enc = 0;
+ int err;
+ int i;
+
+ if (msg->msg_controllen) {
+ err = af_alg_cmsg_send(msg, &con);
+ if (err)
+ return err;
+
+ switch (con.op) {
+ case ALG_OP_ENCRYPT:
+ enc = 1;
+ break;
+ case ALG_OP_DECRYPT:
+ enc = 0;
+ break;
+ default:
+ return -EINVAL;
+ }
+
+ if (con.iv && con.iv->ivlen != ivsize)
+ return -EINVAL;
+ }
+
+ err = -EINVAL;
+
+ lock_sock(sk);
+ if (!ctx->more && ctx->used)
+ goto unlock;
+
+ if (!ctx->used) {
+ ctx->enc = enc;
+ if (con.iv)
+ memcpy(ctx->iv, con.iv->iv, ivsize);
+ }
+
+ while (size) {
+ struct scatterlist *sg;
+ unsigned long len = size;
+ int plen;
+
+ if (ctx->merge) {
+ sgl = list_entry(ctx->tsgl.prev,
+ struct skcipher_sg_list, list);
+ sg = sgl->sg + sgl->cur - 1;
+ len = min_t(unsigned long, len,
+ PAGE_SIZE - sg->offset - sg->length);
+
+ err = memcpy_fromiovec(page_address(sg_page(sg)) +
+ sg->offset + sg->length,
+ msg->msg_iov, len);
+ if (err)
+ goto unlock;
+
+ sg->length += len;
+ ctx->merge = (sg->offset + sg->length) &
+ (PAGE_SIZE - 1);
+
+ ctx->used += len;
+ copied += len;
+ size -= len;
+ continue;
+ }
+
+ if (!skcipher_writable(sk)) {
+ err = skcipher_wait_for_wmem(sk, msg->msg_flags);
+ if (err)
+ goto unlock;
+ }
+
+ len = min_t(unsigned long, len, skcipher_sndbuf(sk));
+
+ err = skcipher_alloc_sgl(sk);
+ if (err)
+ goto unlock;
+
+ sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list);
+ sg = sgl->sg;
+ do {
+ i = sgl->cur;
+ plen = min_t(int, len, PAGE_SIZE);
+
+ sg_assign_page(sg + i, alloc_page(GFP_KERNEL));
+ err = -ENOMEM;
+ if (!sg_page(sg + i))
+ goto unlock;
+
+ err = memcpy_fromiovec(page_address(sg_page(sg + i)),
+ msg->msg_iov, plen);
+ if (err) {
+ __free_page(sg_page(sg + i));
+ sg_assign_page(sg + i, NULL);
+ goto unlock;
+ }
+
+ sg[i].length = plen;
+ len -= plen;
+ ctx->used += plen;
+ copied += plen;
+ size -= plen;
+ sgl->cur++;
+ } while (len && sgl->cur < MAX_SGL_ENTS);
+
+ ctx->merge = plen & (PAGE_SIZE - 1);
+ }
+
+ err = 0;
+
+ ctx->more = msg->msg_flags & MSG_MORE;
+ if (!ctx->more && !list_empty(&ctx->tsgl))
+ sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list);
+
+unlock:
+ skcipher_data_wakeup(sk);
+ release_sock(sk);
+
+ return copied ?: err;
+}
+
+static ssize_t skcipher_sendpage(struct socket *sock, struct page *page,
+ int offset, size_t size, int flags)
+{
+ struct sock *sk = sock->sk;
+ struct alg_sock *ask = alg_sk(sk);
+ struct skcipher_ctx *ctx = ask->private;
+ struct skcipher_sg_list *sgl;
+ int err = -EINVAL;
+
+ if (flags & MSG_SENDPAGE_NOTLAST)
+ flags |= MSG_MORE;
+
+ lock_sock(sk);
+ if (!ctx->more && ctx->used)
+ goto unlock;
+
+ if (!size)
+ goto done;
+
+ if (!skcipher_writable(sk)) {
+ err = skcipher_wait_for_wmem(sk, flags);
+ if (err)
+ goto unlock;
+ }
+
+ err = skcipher_alloc_sgl(sk);
+ if (err)
+ goto unlock;
+
+ ctx->merge = 0;
+ sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list);
+
+ get_page(page);
+ sg_set_page(sgl->sg + sgl->cur, page, size, offset);
+ sgl->cur++;
+ ctx->used += size;
+
+done:
+ ctx->more = flags & MSG_MORE;
+ if (!ctx->more && !list_empty(&ctx->tsgl))
+ sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list);
+
+unlock:
+ skcipher_data_wakeup(sk);
+ release_sock(sk);
+
+ return err ?: size;
+}
+
+static int skcipher_recvmsg(struct kiocb *unused, struct socket *sock,
+ struct msghdr *msg, size_t ignored, int flags)
+{
+ struct sock *sk = sock->sk;
+ struct alg_sock *ask = alg_sk(sk);
+ struct skcipher_ctx *ctx = ask->private;
+ unsigned bs = crypto_ablkcipher_blocksize(crypto_ablkcipher_reqtfm(
+ &ctx->req));
+ struct skcipher_sg_list *sgl;
+ struct scatterlist *sg;
+ unsigned long iovlen;
+ struct iovec *iov;
+ int err = -EAGAIN;
+ int used;
+ long copied = 0;
+
+ lock_sock(sk);
+ for (iov = msg->msg_iov, iovlen = msg->msg_iovlen; iovlen > 0;
+ iovlen--, iov++) {
+ unsigned long seglen = iov->iov_len;
+ char __user *from = iov->iov_base;
+
+ while (seglen) {
+ sgl = list_first_entry(&ctx->tsgl,
+ struct skcipher_sg_list, list);
+ sg = sgl->sg;
+
+ while (!sg->length)
+ sg++;
+
+ used = ctx->used;
+ if (!used) {
+ err = skcipher_wait_for_data(sk, flags);
+ if (err)
+ goto unlock;
+ }
+
+ used = min_t(unsigned long, used, seglen);
+
+ used = af_alg_make_sg(&ctx->rsgl, from, used, 1);
+ err = used;
+ if (err < 0)
+ goto unlock;
+
+ if (ctx->more || used < ctx->used)
+ used -= used % bs;
+
+ err = -EINVAL;
+ if (!used)
+ goto free;
+
+ ablkcipher_request_set_crypt(&ctx->req, sg,
+ ctx->rsgl.sg, used,
+ ctx->iv);
+
+ err = af_alg_wait_for_completion(
+ ctx->enc ?
+ crypto_ablkcipher_encrypt(&ctx->req) :
+ crypto_ablkcipher_decrypt(&ctx->req),
+ &ctx->completion);
+
+free:
+ af_alg_free_sg(&ctx->rsgl);
+
+ if (err)
+ goto unlock;
+
+ copied += used;
+ from += used;
+ seglen -= used;
+ skcipher_pull_sgl(sk, used);
+ }
+ }
+
+ err = 0;
+
+unlock:
+ skcipher_wmem_wakeup(sk);
+ release_sock(sk);
+
+ return copied ?: err;
+}
+
+
+static unsigned int skcipher_poll(struct file *file, struct socket *sock,
+ poll_table *wait)
+{
+ struct sock *sk = sock->sk;
+ struct alg_sock *ask = alg_sk(sk);
+ struct skcipher_ctx *ctx = ask->private;
+ unsigned int mask;
+
+ sock_poll_wait(file, sk_sleep(sk), wait);
+ mask = 0;
+
+ if (ctx->used)
+ mask |= POLLIN | POLLRDNORM;
+
+ if (skcipher_writable(sk))
+ mask |= POLLOUT | POLLWRNORM | POLLWRBAND;
+
+ return mask;
+}
+
+static struct proto_ops algif_skcipher_ops = {
+ .family = PF_ALG,
+
+ .connect = sock_no_connect,
+ .socketpair = sock_no_socketpair,
+ .getname = sock_no_getname,
+ .ioctl = sock_no_ioctl,
+ .listen = sock_no_listen,
+ .shutdown = sock_no_shutdown,
+ .getsockopt = sock_no_getsockopt,
+ .mmap = sock_no_mmap,
+ .bind = sock_no_bind,
+ .accept = sock_no_accept,
+ .setsockopt = sock_no_setsockopt,
+
+ .release = af_alg_release,
+ .sendmsg = skcipher_sendmsg,
+ .sendpage = skcipher_sendpage,
+ .recvmsg = skcipher_recvmsg,
+ .poll = skcipher_poll,
+};
+
+static void *skcipher_bind(const char *name, u32 type, u32 mask)
+{
+ return crypto_alloc_ablkcipher(name, type, mask);
+}
+
+static void skcipher_release(void *private)
+{
+ crypto_free_ablkcipher(private);
+}
+
+static int skcipher_setkey(void *private, const u8 *key, unsigned int keylen)
+{
+ return crypto_ablkcipher_setkey(private, key, keylen);
+}
+
+static void skcipher_sock_destruct(struct sock *sk)
+{
+ struct alg_sock *ask = alg_sk(sk);
+ struct skcipher_ctx *ctx = ask->private;
+ struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(&ctx->req);
+
+ skcipher_free_sgl(sk);
+ sock_kfree_s(sk, ctx->iv, crypto_ablkcipher_ivsize(tfm));
+ sock_kfree_s(sk, ctx, ctx->len);
+ af_alg_release_parent(sk);
+}
+
+static int skcipher_accept_parent(void *private, struct sock *sk)
+{
+ struct skcipher_ctx *ctx;
+ struct alg_sock *ask = alg_sk(sk);
+ unsigned int len = sizeof(*ctx) + crypto_ablkcipher_reqsize(private);
+
+ ctx = sock_kmalloc(sk, len, GFP_KERNEL);
+ if (!ctx)
+ return -ENOMEM;
+
+ ctx->iv = sock_kmalloc(sk, crypto_ablkcipher_ivsize(private),
+ GFP_KERNEL);
+ if (!ctx->iv) {
+ sock_kfree_s(sk, ctx, len);
+ return -ENOMEM;
+ }
+
+ memset(ctx->iv, 0, crypto_ablkcipher_ivsize(private));
+
+ INIT_LIST_HEAD(&ctx->tsgl);
+ ctx->len = len;
+ ctx->used = 0;
+ ctx->more = 0;
+ ctx->merge = 0;
+ ctx->enc = 0;
+ af_alg_init_completion(&ctx->completion);
+
+ ask->private = ctx;
+
+ ablkcipher_request_set_tfm(&ctx->req, private);
+ ablkcipher_request_set_callback(&ctx->req, CRYPTO_TFM_REQ_MAY_BACKLOG,
+ af_alg_complete, &ctx->completion);
+
+ sk->sk_destruct = skcipher_sock_destruct;
+
+ return 0;
+}
+
+static const struct af_alg_type algif_type_skcipher = {
+ .bind = skcipher_bind,
+ .release = skcipher_release,
+ .setkey = skcipher_setkey,
+ .accept = skcipher_accept_parent,
+ .ops = &algif_skcipher_ops,
+ .name = "skcipher",
+ .owner = THIS_MODULE
+};
+
+static int __init algif_skcipher_init(void)
+{
+ return af_alg_register_type(&algif_type_skcipher);
+}
+
+static void __exit algif_skcipher_exit(void)
+{
+ int err = af_alg_unregister_type(&algif_type_skcipher);
+ BUG_ON(err);
+}
+
+module_init(algif_skcipher_init);
+module_exit(algif_skcipher_exit);
+MODULE_LICENSE("GPL");
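
A minimal user-space sketch (not part of the patch) of the matching "skcipher" socket: the key is set on the bound socket, the operation and IV travel as SOL_ALG control messages handled by af_alg_cmsg_send(), sendmsg() queues plaintext through skcipher_sendmsg(), and the read() triggers the cipher in skcipher_recvmsg(). The "cbc(aes)" name, the all-zero key/IV and the missing error handling are assumptions for illustration only.

#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <linux/if_alg.h>

#ifndef AF_ALG
#define AF_ALG 38
#endif
#ifndef SOL_ALG
#define SOL_ALG 279
#endif

int main(void)
{
	struct sockaddr_alg sa = {
		.salg_family = AF_ALG,
		.salg_type   = "skcipher",
		.salg_name   = "cbc(aes)",
	};
	unsigned char key[16] = { 0 }, iv[16] = { 0 };
	unsigned char pt[16] = "single block...", ct[16];
	char cbuf[CMSG_SPACE(4) + CMSG_SPACE(sizeof(struct af_alg_iv) + 16)] = { 0 };
	struct msghdr msg = { 0 };
	struct cmsghdr *cmsg;
	struct af_alg_iv *alg_iv;
	struct iovec iov = { .iov_base = pt, .iov_len = sizeof(pt) };
	int tfmfd, opfd;

	tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);
	bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa));
	setsockopt(tfmfd, SOL_ALG, ALG_SET_KEY, key, sizeof(key));
	opfd = accept(tfmfd, NULL, 0);		/* skcipher_accept_parent() */

	msg.msg_control = cbuf;
	msg.msg_controllen = sizeof(cbuf);

	cmsg = CMSG_FIRSTHDR(&msg);		/* ALG_SET_OP: encrypt */
	cmsg->cmsg_level = SOL_ALG;
	cmsg->cmsg_type = ALG_SET_OP;
	cmsg->cmsg_len = CMSG_LEN(4);
	*(__u32 *)CMSG_DATA(cmsg) = ALG_OP_ENCRYPT;

	cmsg = CMSG_NXTHDR(&msg, cmsg);		/* ALG_SET_IV: ivlen + iv bytes */
	cmsg->cmsg_level = SOL_ALG;
	cmsg->cmsg_type = ALG_SET_IV;
	cmsg->cmsg_len = CMSG_LEN(sizeof(*alg_iv) + sizeof(iv));
	alg_iv = (void *)CMSG_DATA(cmsg);
	alg_iv->ivlen = sizeof(iv);
	memcpy(alg_iv->iv, iv, sizeof(iv));

	msg.msg_iov = &iov;
	msg.msg_iovlen = 1;

	sendmsg(opfd, &msg, 0);			/* skcipher_sendmsg(): queue plaintext */
	read(opfd, ct, sizeof(ct));		/* skcipher_recvmsg(): run the cipher */

	close(opfd);
	close(tfmfd);
	return 0;
}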
diff --git a/crypto/ansi_cprng.c b/crypto/ansi_cprng.c
index 0fac8ffc2fb..666f1962a16 100644
--- a/crypto/ansi_cprng.c
+++ b/crypto/ansi_cprng.c
@@ -83,9 +83,9 @@ static void xor_vectors(unsigned char *in1, unsigned char *in2,
}
/*
* Returns DEFAULT_BLK_SZ bytes of random data per call
- * returns 0 if generation succeded, <0 if something went wrong
+ * returns 0 if generation succeeded, <0 if something went wrong
*/
-static int _get_more_prng_bytes(struct prng_context *ctx)
+static int _get_more_prng_bytes(struct prng_context *ctx, int cont_test)
{
int i;
unsigned char tmp[DEFAULT_BLK_SZ];
@@ -132,9 +132,15 @@ static int _get_more_prng_bytes(struct prng_context *ctx)
*/
if (!memcmp(ctx->rand_data, ctx->last_rand_data,
DEFAULT_BLK_SZ)) {
+ if (cont_test) {
+ panic("cprng %p Failed repetition check!\n",
+ ctx);
+ }
+
printk(KERN_ERR
"ctx %p Failed repetition check!\n",
ctx);
+
ctx->flags |= PRNG_NEED_RESET;
return -EINVAL;
}
@@ -179,18 +185,15 @@ static int _get_more_prng_bytes(struct prng_context *ctx)
}
/* Our exported functions */
-static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
+static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx,
+ int do_cont_test)
{
- unsigned long flags;
unsigned char *ptr = buf;
unsigned int byte_count = (unsigned int)nbytes;
int err;
- if (nbytes < 0)
- return -EINVAL;
-
- spin_lock_irqsave(&ctx->prng_lock, flags);
+ spin_lock_bh(&ctx->prng_lock);
err = -EINVAL;
if (ctx->flags & PRNG_NEED_RESET)
@@ -215,7 +218,7 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
remainder:
if (ctx->rand_data_valid == DEFAULT_BLK_SZ) {
- if (_get_more_prng_bytes(ctx) < 0) {
+ if (_get_more_prng_bytes(ctx, do_cont_test) < 0) {
memset(buf, 0, nbytes);
err = -EINVAL;
goto done;
@@ -227,11 +230,11 @@ remainder:
*/
if (byte_count < DEFAULT_BLK_SZ) {
empty_rbuf:
- for (; ctx->rand_data_valid < DEFAULT_BLK_SZ;
- ctx->rand_data_valid++) {
+ while (ctx->rand_data_valid < DEFAULT_BLK_SZ) {
*ptr = ctx->rand_data[ctx->rand_data_valid];
ptr++;
byte_count--;
+ ctx->rand_data_valid++;
if (byte_count == 0)
goto done;
}
@@ -242,7 +245,7 @@ empty_rbuf:
*/
for (; byte_count >= DEFAULT_BLK_SZ; byte_count -= DEFAULT_BLK_SZ) {
if (ctx->rand_data_valid == DEFAULT_BLK_SZ) {
- if (_get_more_prng_bytes(ctx) < 0) {
+ if (_get_more_prng_bytes(ctx, do_cont_test) < 0) {
memset(buf, 0, nbytes);
err = -EINVAL;
goto done;
@@ -262,7 +265,7 @@ empty_rbuf:
goto remainder;
done:
- spin_unlock_irqrestore(&ctx->prng_lock, flags);
+ spin_unlock_bh(&ctx->prng_lock);
dbgprint(KERN_CRIT "returning %d from get_prng_bytes in context %p\n",
err, ctx);
return err;
@@ -278,10 +281,9 @@ static int reset_prng_context(struct prng_context *ctx,
unsigned char *V, unsigned char *DT)
{
int ret;
- int rc = -EINVAL;
unsigned char *prng_key;
- spin_lock(&ctx->prng_lock);
+ spin_lock_bh(&ctx->prng_lock);
ctx->flags |= PRNG_NEED_RESET;
prng_key = (key != NULL) ? key : (unsigned char *)DEFAULT_PRNG_KEY;
@@ -302,34 +304,20 @@ static int reset_prng_context(struct prng_context *ctx,
memset(ctx->rand_data, 0, DEFAULT_BLK_SZ);
memset(ctx->last_rand_data, 0, DEFAULT_BLK_SZ);
- if (ctx->tfm)
- crypto_free_cipher(ctx->tfm);
-
- ctx->tfm = crypto_alloc_cipher("aes", 0, 0);
- if (IS_ERR(ctx->tfm)) {
- dbgprint(KERN_CRIT "Failed to alloc tfm for context %p\n",
- ctx);
- ctx->tfm = NULL;
- goto out;
- }
-
ctx->rand_data_valid = DEFAULT_BLK_SZ;
ret = crypto_cipher_setkey(ctx->tfm, prng_key, klen);
if (ret) {
dbgprint(KERN_CRIT "PRNG: setkey() failed flags=%x\n",
crypto_cipher_get_flags(ctx->tfm));
- crypto_free_cipher(ctx->tfm);
goto out;
}
- rc = 0;
+ ret = 0;
ctx->flags &= ~PRNG_NEED_RESET;
out:
- spin_unlock(&ctx->prng_lock);
-
- return rc;
-
+ spin_unlock_bh(&ctx->prng_lock);
+ return ret;
}
static int cprng_init(struct crypto_tfm *tfm)
@@ -337,8 +325,23 @@ static int cprng_init(struct crypto_tfm *tfm)
struct prng_context *ctx = crypto_tfm_ctx(tfm);
spin_lock_init(&ctx->prng_lock);
+ ctx->tfm = crypto_alloc_cipher("aes", 0, 0);
+ if (IS_ERR(ctx->tfm)) {
+ dbgprint(KERN_CRIT "Failed to alloc tfm for context %p\n",
+ ctx);
+ return PTR_ERR(ctx->tfm);
+ }
+
+ if (reset_prng_context(ctx, NULL, DEFAULT_PRNG_KSZ, NULL, NULL) < 0)
+ return -EINVAL;
- return reset_prng_context(ctx, NULL, DEFAULT_PRNG_KSZ, NULL, NULL);
+ /*
+ * after allocation, we should always force the user to reset
+ * so they don't inadvertently use the insecure default values
+ * without specifying them intentionally
+ */
+ ctx->flags |= PRNG_NEED_RESET;
+ return 0;
}
static void cprng_exit(struct crypto_tfm *tfm)
@@ -351,7 +354,7 @@ static int cprng_get_random(struct crypto_rng *tfm, u8 *rdata,
{
struct prng_context *prng = crypto_rng_ctx(tfm);
- return get_prng_bytes(rdata, dlen, prng);
+ return get_prng_bytes(rdata, dlen, prng, 0);
}
/*
@@ -379,7 +382,45 @@ static int cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen)
return 0;
}
-static struct crypto_alg rng_alg = {
+#ifdef CONFIG_CRYPTO_FIPS
+static int fips_cprng_get_random(struct crypto_rng *tfm, u8 *rdata,
+ unsigned int dlen)
+{
+ struct prng_context *prng = crypto_rng_ctx(tfm);
+
+ return get_prng_bytes(rdata, dlen, prng, 1);
+}
+
+static int fips_cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen)
+{
+ u8 rdata[DEFAULT_BLK_SZ];
+ u8 *key = seed + DEFAULT_BLK_SZ;
+ int rc;
+
+ struct prng_context *prng = crypto_rng_ctx(tfm);
+
+ if (slen < DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ)
+ return -EINVAL;
+
+ /* fips strictly requires seed != key */
+ if (!memcmp(seed, key, DEFAULT_PRNG_KSZ))
+ return -EINVAL;
+
+ rc = cprng_reset(tfm, seed, slen);
+
+ if (!rc)
+ goto out;
+
+ /* this primes our continuity test */
+ rc = get_prng_bytes(rdata, DEFAULT_BLK_SZ, prng, 0);
+ prng->rand_data_valid = DEFAULT_BLK_SZ;
+
+out:
+ return rc;
+}
+#endif
+
+static struct crypto_alg rng_algs[] = { {
.cra_name = "stdrng",
.cra_driver_name = "ansi_cprng",
.cra_priority = 100,
@@ -387,7 +428,6 @@ static struct crypto_alg rng_alg = {
.cra_ctxsize = sizeof(struct prng_context),
.cra_type = &crypto_rng_type,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(rng_alg.cra_list),
.cra_init = cprng_init,
.cra_exit = cprng_exit,
.cra_u = {
@@ -397,29 +437,36 @@ static struct crypto_alg rng_alg = {
.seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ,
}
}
-};
-
+#ifdef CONFIG_CRYPTO_FIPS
+}, {
+ .cra_name = "fips(ansi_cprng)",
+ .cra_driver_name = "fips_ansi_cprng",
+ .cra_priority = 300,
+ .cra_flags = CRYPTO_ALG_TYPE_RNG,
+ .cra_ctxsize = sizeof(struct prng_context),
+ .cra_type = &crypto_rng_type,
+ .cra_module = THIS_MODULE,
+ .cra_init = cprng_init,
+ .cra_exit = cprng_exit,
+ .cra_u = {
+ .rng = {
+ .rng_make_random = fips_cprng_get_random,
+ .rng_reset = fips_cprng_reset,
+ .seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ,
+ }
+ }
+#endif
+} };
/* Module initialization */
static int __init prng_mod_init(void)
{
- int ret = 0;
-
- if (fips_enabled)
- rng_alg.cra_priority += 200;
-
- ret = crypto_register_alg(&rng_alg);
-
- if (ret)
- goto out;
-out:
- return 0;
+ return crypto_register_algs(rng_algs, ARRAY_SIZE(rng_algs));
}
static void __exit prng_mod_fini(void)
{
- crypto_unregister_alg(&rng_alg);
- return;
+ crypto_unregister_algs(rng_algs, ARRAY_SIZE(rng_algs));
}
MODULE_LICENSE("GPL");
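
For context, an in-kernel usage sketch (not part of the patch) of how a caller might reach this PRNG through the generic RNG interface; the helper name is hypothetical, and the seed is assumed to be laid out as V followed by the key, with the optional DT appended, per the seedsize above.

#include <linux/err.h>
#include <crypto/rng.h>

static int example_get_random(u8 *buf, unsigned int len,
			      u8 *seed, unsigned int slen)
{
	struct crypto_rng *rng;
	int err;

	/* "stdrng" resolves to the highest-priority registered stdrng;
	 * the FIPS variant above is reached by its own "fips(ansi_cprng)" name */
	rng = crypto_alloc_rng("stdrng", 0, 0);
	if (IS_ERR(rng))
		return PTR_ERR(rng);

	err = crypto_rng_reset(rng, seed, slen);
	if (!err)
		err = crypto_rng_get_bytes(rng, buf, len);

	crypto_free_rng(rng);
	return err;
}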
diff --git a/crypto/anubis.c b/crypto/anubis.c
index e42c3a8ba4a..008c8a4fb67 100644
--- a/crypto/anubis.c
+++ b/crypto/anubis.c
@@ -469,14 +469,13 @@ static int anubis_setkey(struct crypto_tfm *tfm, const u8 *in_key,
u32 kappa[ANUBIS_MAX_N];
u32 inter[ANUBIS_MAX_N];
- switch (key_len)
- {
+ switch (key_len) {
case 16: case 20: case 24: case 28:
case 32: case 36: case 40:
break;
default:
*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
- return - EINVAL;
+ return -EINVAL;
}
ctx->key_len = key_len * 8;
@@ -530,23 +529,24 @@ static int anubis_setkey(struct crypto_tfm *tfm, const u8 *in_key,
/*
* compute kappa^{r+1} from kappa^r:
*/
- if (r == R) {
+ if (r == R)
break;
- }
for (i = 0; i < N; i++) {
int j = i;
inter[i] = T0[(kappa[j--] >> 24) ];
- if (j < 0) j = N - 1;
+ if (j < 0)
+ j = N - 1;
inter[i] ^= T1[(kappa[j--] >> 16) & 0xff];
- if (j < 0) j = N - 1;
+ if (j < 0)
+ j = N - 1;
inter[i] ^= T2[(kappa[j--] >> 8) & 0xff];
- if (j < 0) j = N - 1;
+ if (j < 0)
+ j = N - 1;
inter[i] ^= T3[(kappa[j ] ) & 0xff];
}
kappa[0] = inter[0] ^ rc[r];
- for (i = 1; i < N; i++) {
+ for (i = 1; i < N; i++)
kappa[i] = inter[i];
- }
}
/*
@@ -678,7 +678,6 @@ static struct crypto_alg anubis_alg = {
.cra_ctxsize = sizeof (struct anubis_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(anubis_alg.cra_list),
.cra_u = { .cipher = {
.cia_min_keysize = ANUBIS_MIN_KEY_SIZE,
.cia_max_keysize = ANUBIS_MAX_KEY_SIZE,
@@ -690,7 +689,7 @@ static struct crypto_alg anubis_alg = {
static int __init anubis_mod_init(void)
{
int ret = 0;
-
+
ret = crypto_register_alg(&anubis_alg);
return ret;
}
diff --git a/crypto/api.c b/crypto/api.c
index 38a2bc02a98..a2b39c5f364 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -10,7 +10,7 @@
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
+ * Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
@@ -34,11 +34,7 @@ EXPORT_SYMBOL_GPL(crypto_alg_sem);
BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);
-static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
-{
- atomic_inc(&alg->cra_refcnt);
- return alg;
-}
+static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
@@ -150,8 +146,11 @@ static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
}
up_write(&crypto_alg_sem);
- if (alg != &larval->alg)
+ if (alg != &larval->alg) {
kfree(larval);
+ if (crypto_is_larval(alg))
+ alg = crypto_larval_wait(alg);
+ }
return alg;
}
@@ -217,13 +216,11 @@ struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
alg = crypto_alg_lookup(name, type, mask);
if (!alg) {
- char tmp[CRYPTO_MAX_ALG_NAME];
+ request_module("%s", name);
- request_module(name);
-
- if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask) &&
- snprintf(tmp, sizeof(tmp), "%s-all", name) < sizeof(tmp))
- request_module(tmp);
+ if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
+ CRYPTO_ALG_NEED_FALLBACK))
+ request_module("%s-all", name);
alg = crypto_alg_lookup(name, type, mask);
}
@@ -255,7 +252,7 @@ struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
struct crypto_alg *larval;
int ok;
- if (!(mask & CRYPTO_ALG_TESTED)) {
+ if (!((type | mask) & CRYPTO_ALG_TESTED)) {
type |= CRYPTO_ALG_TESTED;
mask |= CRYPTO_ALG_TESTED;
}
@@ -287,21 +284,14 @@ static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
switch (crypto_tfm_alg_type(tfm)) {
case CRYPTO_ALG_TYPE_CIPHER:
return crypto_init_cipher_ops(tfm);
-
- case CRYPTO_ALG_TYPE_DIGEST:
- if ((mask & CRYPTO_ALG_TYPE_HASH_MASK) !=
- CRYPTO_ALG_TYPE_HASH_MASK)
- return crypto_init_digest_ops_async(tfm);
- else
- return crypto_init_digest_ops(tfm);
case CRYPTO_ALG_TYPE_COMPRESS:
return crypto_init_compress_ops(tfm);
-
+
default:
break;
}
-
+
BUG();
return -EINVAL;
}
@@ -320,18 +310,13 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
case CRYPTO_ALG_TYPE_CIPHER:
crypto_exit_cipher_ops(tfm);
break;
-
- case CRYPTO_ALG_TYPE_DIGEST:
- crypto_exit_digest_ops(tfm);
- break;
-
+
case CRYPTO_ALG_TYPE_COMPRESS:
crypto_exit_compress_ops(tfm);
break;
-
+
default:
BUG();
-
}
}
@@ -351,11 +336,7 @@ static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
case CRYPTO_ALG_TYPE_CIPHER:
len += crypto_cipher_ctxsize(alg);
break;
-
- case CRYPTO_ALG_TYPE_DIGEST:
- len += crypto_digest_ctxsize(alg);
- break;
-
+
case CRYPTO_ALG_TYPE_COMPRESS:
len += crypto_compress_ctxsize(alg);
break;
@@ -415,7 +396,7 @@ EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
* @mask: Mask for type comparison
*
* This function should not be used by new algorithm types.
- * Plesae use crypto_alloc_tfm instead.
+ * Please use crypto_alloc_tfm instead.
*
* crypto_alloc_base() will first attempt to locate an already loaded
* algorithm. If that fails and the kernel supports dynamically loadable
@@ -464,8 +445,8 @@ err:
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
-struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
- const struct crypto_type *frontend)
+void *crypto_create_tfm(struct crypto_alg *alg,
+ const struct crypto_type *frontend)
{
char *mem;
struct crypto_tfm *tfm = NULL;
@@ -474,7 +455,7 @@ struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
int err = -ENOMEM;
tfmsize = frontend->tfmsize;
- total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend);
+ total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);
mem = kzalloc(total, GFP_KERNEL);
if (mem == NULL)
@@ -483,7 +464,7 @@ struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
tfm = (struct crypto_tfm *)(mem + tfmsize);
tfm->__crt_alg = alg;
- err = frontend->init_tfm(tfm, frontend);
+ err = frontend->init_tfm(tfm);
if (err)
goto out_free_tfm;
@@ -499,12 +480,33 @@ out_free_tfm:
crypto_shoot_alg(alg);
kfree(mem);
out_err:
- tfm = ERR_PTR(err);
+ mem = ERR_PTR(err);
out:
- return tfm;
+ return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);
+struct crypto_alg *crypto_find_alg(const char *alg_name,
+ const struct crypto_type *frontend,
+ u32 type, u32 mask)
+{
+ struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
+ crypto_alg_mod_lookup;
+
+ if (frontend) {
+ type &= frontend->maskclear;
+ mask &= frontend->maskclear;
+ type |= frontend->type;
+ mask |= frontend->maskset;
+
+ if (frontend->lookup)
+ lookup = frontend->lookup;
+ }
+
+ return lookup(alg_name, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_find_alg);
+
/*
* crypto_alloc_tfm - Locate algorithm and allocate transform
* @alg_name: Name of algorithm
@@ -525,25 +527,16 @@ EXPORT_SYMBOL_GPL(crypto_create_tfm);
*
* In case of error the return value is an error pointer.
*/
-struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
- const struct crypto_type *frontend,
- u32 type, u32 mask)
+void *crypto_alloc_tfm(const char *alg_name,
+ const struct crypto_type *frontend, u32 type, u32 mask)
{
- struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
- struct crypto_tfm *tfm;
+ void *tfm;
int err;
- type &= frontend->maskclear;
- mask &= frontend->maskclear;
- type |= frontend->type;
- mask |= frontend->maskset;
-
- lookup = frontend->lookup ?: crypto_alg_mod_lookup;
-
for (;;) {
struct crypto_alg *alg;
- alg = lookup(alg_name, type, mask);
+ alg = crypto_find_alg(alg_name, frontend, type, mask);
if (IS_ERR(alg)) {
err = PTR_ERR(alg);
goto err;
@@ -580,20 +573,17 @@ EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
struct crypto_alg *alg;
- int size;
if (unlikely(!mem))
return;
alg = tfm->__crt_alg;
- size = ksize(mem);
if (!tfm->exit && alg->cra_exit)
alg->cra_exit(tfm);
crypto_exit_ops(tfm);
crypto_mod_put(alg);
- memset(mem, 0, size);
- kfree(mem);
+ kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
@@ -601,12 +591,12 @@ int crypto_has_alg(const char *name, u32 type, u32 mask)
{
int ret = 0;
struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);
-
+
if (!IS_ERR(alg)) {
crypto_mod_put(alg);
ret = 1;
}
-
+
return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
diff --git a/crypto/arc4.c b/crypto/arc4.c
index 8be47e13a9e..5a772c3657d 100644
--- a/crypto/arc4.c
+++ b/crypto/arc4.c
@@ -1,4 +1,4 @@
-/*
+/*
* Cryptographic API
*
* ARC4 Cipher Algorithm
@@ -11,17 +11,19 @@
* (at your option) any later version.
*
*/
+
#include <linux/module.h>
#include <linux/init.h>
#include <linux/crypto.h>
+#include <crypto/algapi.h>
#define ARC4_MIN_KEY_SIZE 1
#define ARC4_MAX_KEY_SIZE 256
#define ARC4_BLOCK_SIZE 1
struct arc4_ctx {
- u8 S[256];
- u8 x, y;
+ u32 S[256];
+ u32 x, y;
};
static int arc4_set_key(struct crypto_tfm *tfm, const u8 *in_key,
@@ -33,67 +35,129 @@ static int arc4_set_key(struct crypto_tfm *tfm, const u8 *in_key,
ctx->x = 1;
ctx->y = 0;
- for(i = 0; i < 256; i++)
+ for (i = 0; i < 256; i++)
ctx->S[i] = i;
- for(i = 0; i < 256; i++)
- {
- u8 a = ctx->S[i];
+ for (i = 0; i < 256; i++) {
+ u32 a = ctx->S[i];
j = (j + in_key[k] + a) & 0xff;
ctx->S[i] = ctx->S[j];
ctx->S[j] = a;
- if(++k >= key_len)
+ if (++k >= key_len)
k = 0;
}
return 0;
}
-static void arc4_crypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void arc4_crypt(struct arc4_ctx *ctx, u8 *out, const u8 *in,
+ unsigned int len)
{
- struct arc4_ctx *ctx = crypto_tfm_ctx(tfm);
+ u32 *const S = ctx->S;
+ u32 x, y, a, b;
+ u32 ty, ta, tb;
+
+ if (len == 0)
+ return;
- u8 *const S = ctx->S;
- u8 x = ctx->x;
- u8 y = ctx->y;
- u8 a, b;
+ x = ctx->x;
+ y = ctx->y;
a = S[x];
y = (y + a) & 0xff;
b = S[y];
- S[x] = b;
- S[y] = a;
- x = (x + 1) & 0xff;
- *out++ = *in ^ S[(a + b) & 0xff];
+
+ do {
+ S[y] = a;
+ a = (a + b) & 0xff;
+ S[x] = b;
+ x = (x + 1) & 0xff;
+ ta = S[x];
+ ty = (y + ta) & 0xff;
+ tb = S[ty];
+ *out++ = *in++ ^ S[a];
+ if (--len == 0)
+ break;
+ y = ty;
+ a = ta;
+ b = tb;
+ } while (true);
ctx->x = x;
ctx->y = y;
}
-static struct crypto_alg arc4_alg = {
+static void arc4_crypt_one(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+{
+ arc4_crypt(crypto_tfm_ctx(tfm), out, in, 1);
+}
+
+static int ecb_arc4_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
+ struct scatterlist *src, unsigned int nbytes)
+{
+ struct arc4_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+ struct blkcipher_walk walk;
+ int err;
+
+ blkcipher_walk_init(&walk, dst, src, nbytes);
+
+ err = blkcipher_walk_virt(desc, &walk);
+
+ while (walk.nbytes > 0) {
+ u8 *wsrc = walk.src.virt.addr;
+ u8 *wdst = walk.dst.virt.addr;
+
+ arc4_crypt(ctx, wdst, wsrc, walk.nbytes);
+
+ err = blkcipher_walk_done(desc, &walk, 0);
+ }
+
+ return err;
+}
+
+static struct crypto_alg arc4_algs[2] = { {
.cra_name = "arc4",
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
.cra_blocksize = ARC4_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct arc4_ctx),
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(arc4_alg.cra_list),
- .cra_u = { .cipher = {
- .cia_min_keysize = ARC4_MIN_KEY_SIZE,
- .cia_max_keysize = ARC4_MAX_KEY_SIZE,
- .cia_setkey = arc4_set_key,
- .cia_encrypt = arc4_crypt,
- .cia_decrypt = arc4_crypt } }
-};
+ .cra_u = {
+ .cipher = {
+ .cia_min_keysize = ARC4_MIN_KEY_SIZE,
+ .cia_max_keysize = ARC4_MAX_KEY_SIZE,
+ .cia_setkey = arc4_set_key,
+ .cia_encrypt = arc4_crypt_one,
+ .cia_decrypt = arc4_crypt_one,
+ },
+ },
+}, {
+ .cra_name = "ecb(arc4)",
+ .cra_priority = 100,
+ .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+ .cra_blocksize = ARC4_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct arc4_ctx),
+ .cra_alignmask = 0,
+ .cra_type = &crypto_blkcipher_type,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
+ .blkcipher = {
+ .min_keysize = ARC4_MIN_KEY_SIZE,
+ .max_keysize = ARC4_MAX_KEY_SIZE,
+ .setkey = arc4_set_key,
+ .encrypt = ecb_arc4_crypt,
+ .decrypt = ecb_arc4_crypt,
+ },
+ },
+} };
static int __init arc4_init(void)
{
- return crypto_register_alg(&arc4_alg);
+ return crypto_register_algs(arc4_algs, ARRAY_SIZE(arc4_algs));
}
-
static void __exit arc4_exit(void)
{
- crypto_unregister_alg(&arc4_alg);
+ crypto_unregister_algs(arc4_algs, ARRAY_SIZE(arc4_algs));
}
module_init(arc4_init);
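
A short in-kernel sketch (not part of the patch) showing what the new "ecb(arc4)" entry buys: the whole buffer is handed to arc4_crypt() in one blkcipher call instead of one byte per cia_encrypt() invocation. The helper name is hypothetical and error handling is kept minimal.

#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

static int example_arc4(u8 *buf, unsigned int len,
			const u8 *key, unsigned int keylen)
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_blkcipher("ecb(arc4)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_blkcipher_setkey(tfm, key, keylen);
	if (err)
		goto out;

	desc.tfm = tfm;
	desc.flags = 0;
	sg_init_one(&sg, buf, len);

	/* RC4 is a stream cipher, so encrypt and decrypt are the same operation */
	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
out:
	crypto_free_blkcipher(tfm);
	return err;
}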
diff --git a/crypto/asymmetric_keys/.gitignore b/crypto/asymmetric_keys/.gitignore
new file mode 100644
index 00000000000..ee328374dba
--- /dev/null
+++ b/crypto/asymmetric_keys/.gitignore
@@ -0,0 +1 @@
+*-asn1.[ch]
diff --git a/crypto/asymmetric_keys/Kconfig b/crypto/asymmetric_keys/Kconfig
new file mode 100644
index 00000000000..03a6eb95ab5
--- /dev/null
+++ b/crypto/asymmetric_keys/Kconfig
@@ -0,0 +1,40 @@
+menuconfig ASYMMETRIC_KEY_TYPE
+ tristate "Asymmetric (public-key cryptographic) key type"
+ depends on KEYS
+ help
+ This option provides support for a key type that holds the data for
+ the asymmetric keys used for public key cryptographic operations such
+ as encryption, decryption, signature generation and signature
+ verification.
+
+if ASYMMETRIC_KEY_TYPE
+
+config ASYMMETRIC_PUBLIC_KEY_SUBTYPE
+ tristate "Asymmetric public-key crypto algorithm subtype"
+ select MPILIB
+ select PUBLIC_KEY_ALGO_RSA
+ select CRYPTO_HASH_INFO
+ help
+ This option provides support for asymmetric public key type handling.
+ If signature generation and/or verification are to be used,
+ appropriate hash algorithms (such as SHA-1) must be available.
+ ENOPKG will be reported if the requisite algorithm is unavailable.
+
+config PUBLIC_KEY_ALGO_RSA
+ tristate "RSA public-key algorithm"
+ select MPILIB_EXTRA
+ select MPILIB
+ help
+ This option enables support for the RSA algorithm (PKCS#1, RFC3447).
+
+config X509_CERTIFICATE_PARSER
+ tristate "X.509 certificate parser"
+ depends on ASYMMETRIC_PUBLIC_KEY_SUBTYPE
+ select ASN1
+ select OID_REGISTRY
+ help
+ This option provides support for parsing X.509 format blobs for key
+ data and provides the ability to instantiate a crypto key from a
+ public key packet found inside the certificate.
+
+endif # ASYMMETRIC_KEY_TYPE
diff --git a/crypto/asymmetric_keys/Makefile b/crypto/asymmetric_keys/Makefile
new file mode 100644
index 00000000000..0727204aab6
--- /dev/null
+++ b/crypto/asymmetric_keys/Makefile
@@ -0,0 +1,27 @@
+#
+# Makefile for asymmetric cryptographic keys
+#
+
+obj-$(CONFIG_ASYMMETRIC_KEY_TYPE) += asymmetric_keys.o
+
+asymmetric_keys-y := asymmetric_type.o signature.o
+
+obj-$(CONFIG_ASYMMETRIC_PUBLIC_KEY_SUBTYPE) += public_key.o
+obj-$(CONFIG_PUBLIC_KEY_ALGO_RSA) += rsa.o
+
+#
+# X.509 Certificate handling
+#
+obj-$(CONFIG_X509_CERTIFICATE_PARSER) += x509_key_parser.o
+x509_key_parser-y := \
+ x509-asn1.o \
+ x509_rsakey-asn1.o \
+ x509_cert_parser.o \
+ x509_public_key.o
+
+$(obj)/x509_cert_parser.o: $(obj)/x509-asn1.h $(obj)/x509_rsakey-asn1.h
+$(obj)/x509-asn1.o: $(obj)/x509-asn1.c $(obj)/x509-asn1.h
+$(obj)/x509_rsakey-asn1.o: $(obj)/x509_rsakey-asn1.c $(obj)/x509_rsakey-asn1.h
+
+clean-files += x509-asn1.c x509-asn1.h
+clean-files += x509_rsakey-asn1.c x509_rsakey-asn1.h
diff --git a/crypto/asymmetric_keys/asymmetric_keys.h b/crypto/asymmetric_keys/asymmetric_keys.h
new file mode 100644
index 00000000000..515b6343081
--- /dev/null
+++ b/crypto/asymmetric_keys/asymmetric_keys.h
@@ -0,0 +1,15 @@
+/* Internal definitions for asymmetric key type
+ *
+ * Copyright (C) 2012 Red Hat, Inc. All Rights Reserved.
+ * Written by David Howells (dhowells@redhat.com)
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public Licence
+ * as published by the Free Software Foundation; either version
+ * 2 of the Licence, or (at your option) any later version.
+ */
+
+static inline const char *asymmetric_key_id(const struct key *key)
+{
+ return key->type_data.p[1];
+}
diff --git a/crypto/asymmetric_keys/asymmetric_type.c b/crypto/asymmetric_keys/asymmetric_type.c
new file mode 100644
index 00000000000..b77eb530478
--- /dev/null
+++ b/crypto/asymmetric_keys/asymmetric_type.c
@@ -0,0 +1,275 @@
+/* Asymmetric public-key cryptography key type
+ *
+ * See Documentation/security/asymmetric-keys.txt
+ *
+ * Copyright (C) 2012 Red Hat, Inc. All Rights Reserved.
+ * Written by David Howells (dhowells@redhat.com)
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public Licence
+ * as published by the Free Software Foundation; either version
+ * 2 of the Licence, or (at your option) any later version.
+ */
+#include <keys/asymmetric-subtype.h>
+#include <keys/asymmetric-parser.h>
+#include <linux/seq_file.h>
+#include <linux/module.h>
+#include <linux/slab.h>
+#include "asymmetric_keys.h"
+
+MODULE_LICENSE("GPL");
+
+static LIST_HEAD(asymmetric_key_parsers);
+static DECLARE_RWSEM(asymmetric_key_parsers_sem);
+
+/*
+ * Match asymmetric keys on (part of) their name
+ * We have some shorthand methods for matching keys. We allow:
+ *
+ * "<desc>" - request a key by description
+ * "id:<id>" - request a key matching the ID
+ * "<subtype>:<id>" - request a key of a subtype
+ */
+static int asymmetric_key_match(const struct key *key, const void *description)
+{
+ const struct asymmetric_key_subtype *subtype = asymmetric_key_subtype(key);
+ const char *spec = description;
+ const char *id, *kid;
+ ptrdiff_t speclen;
+ size_t idlen, kidlen;
+
+ if (!subtype || !spec || !*spec)
+ return 0;
+
+ /* See if the full key description matches as is */
+ if (key->description && strcmp(key->description, description) == 0)
+ return 1;
+
+ /* All tests from here on break the criterion description into a
+ * specifier, a colon and then an identifier.
+ */
+ id = strchr(spec, ':');
+ if (!id)
+ return 0;
+
+ speclen = id - spec;
+ id++;
+
+ /* Anything after here requires a partial match on the ID string */
+ kid = asymmetric_key_id(key);
+ if (!kid)
+ return 0;
+
+ idlen = strlen(id);
+ kidlen = strlen(kid);
+ if (idlen > kidlen)
+ return 0;
+
+ kid += kidlen - idlen;
+ if (strcasecmp(id, kid) != 0)
+ return 0;
+
+ if (speclen == 2 &&
+ memcmp(spec, "id", 2) == 0)
+ return 1;
+
+ if (speclen == subtype->name_len &&
+ memcmp(spec, subtype->name, speclen) == 0)
+ return 1;
+
+ return 0;
+}
+
+/*
+ * Describe the asymmetric key
+ */
+static void asymmetric_key_describe(const struct key *key, struct seq_file *m)
+{
+ const struct asymmetric_key_subtype *subtype = asymmetric_key_subtype(key);
+ const char *kid = asymmetric_key_id(key);
+ size_t n;
+
+ seq_puts(m, key->description);
+
+ if (subtype) {
+ seq_puts(m, ": ");
+ subtype->describe(key, m);
+
+ if (kid) {
+ seq_putc(m, ' ');
+ n = strlen(kid);
+ if (n <= 8)
+ seq_puts(m, kid);
+ else
+ seq_puts(m, kid + n - 8);
+ }
+
+ seq_puts(m, " [");
+ /* put something here to indicate the key's capabilities */
+ seq_putc(m, ']');
+ }
+}
+
+/*
+ * Preparse an asymmetric payload to get the contents into a format appropriate
+ * for the internal payload, cutting down on the number of scans of the data performed.
+ *
+ * We also generate a proposed description from the contents of the key that
+ * can be used to name the key if the user doesn't want to provide one.
+ */
+static int asymmetric_key_preparse(struct key_preparsed_payload *prep)
+{
+ struct asymmetric_key_parser *parser;
+ int ret;
+
+ pr_devel("==>%s()\n", __func__);
+
+ if (prep->datalen == 0)
+ return -EINVAL;
+
+ down_read(&asymmetric_key_parsers_sem);
+
+ ret = -EBADMSG;
+ list_for_each_entry(parser, &asymmetric_key_parsers, link) {
+ pr_debug("Trying parser '%s'\n", parser->name);
+
+ ret = parser->parse(prep);
+ if (ret != -EBADMSG) {
+ pr_debug("Parser recognised the format (ret %d)\n",
+ ret);
+ break;
+ }
+ }
+
+ up_read(&asymmetric_key_parsers_sem);
+ pr_devel("<==%s() = %d\n", __func__, ret);
+ return ret;
+}
+
+/*
+ * Clean up the preparse data
+ */
+static void asymmetric_key_free_preparse(struct key_preparsed_payload *prep)
+{
+ struct asymmetric_key_subtype *subtype = prep->type_data[0];
+
+ pr_devel("==>%s()\n", __func__);
+
+ if (subtype) {
+ subtype->destroy(prep->payload);
+ module_put(subtype->owner);
+ }
+ kfree(prep->type_data[1]);
+ kfree(prep->description);
+}
+
+/*
+ * Instantiate an asymmetric_key defined key. The key was preparsed, so we just
+ * have to transfer the data here.
+ */
+static int asymmetric_key_instantiate(struct key *key, struct key_preparsed_payload *prep)
+{
+ int ret;
+
+ pr_devel("==>%s()\n", __func__);
+
+ ret = key_payload_reserve(key, prep->quotalen);
+ if (ret == 0) {
+ key->type_data.p[0] = prep->type_data[0];
+ key->type_data.p[1] = prep->type_data[1];
+ key->payload.data = prep->payload;
+ prep->type_data[0] = NULL;
+ prep->type_data[1] = NULL;
+ prep->payload = NULL;
+ }
+ pr_devel("<==%s() = %d\n", __func__, ret);
+ return ret;
+}
+
+/*
+ * Dispose of the data dangling from the corpse of an asymmetric key
+ */
+static void asymmetric_key_destroy(struct key *key)
+{
+ struct asymmetric_key_subtype *subtype = asymmetric_key_subtype(key);
+ if (subtype) {
+ subtype->destroy(key->payload.data);
+ module_put(subtype->owner);
+ key->type_data.p[0] = NULL;
+ }
+ kfree(key->type_data.p[1]);
+ key->type_data.p[1] = NULL;
+}
+
+struct key_type key_type_asymmetric = {
+ .name = "asymmetric",
+ .preparse = asymmetric_key_preparse,
+ .free_preparse = asymmetric_key_free_preparse,
+ .instantiate = asymmetric_key_instantiate,
+ .match = asymmetric_key_match,
+ .destroy = asymmetric_key_destroy,
+ .describe = asymmetric_key_describe,
+ .def_lookup_type = KEYRING_SEARCH_LOOKUP_ITERATE,
+};
+EXPORT_SYMBOL_GPL(key_type_asymmetric);
+
+/**
+ * register_asymmetric_key_parser - Register an asymmetric key blob parser
+ * @parser: The parser to register
+ */
+int register_asymmetric_key_parser(struct asymmetric_key_parser *parser)
+{
+ struct asymmetric_key_parser *cursor;
+ int ret;
+
+ down_write(&asymmetric_key_parsers_sem);
+
+ list_for_each_entry(cursor, &asymmetric_key_parsers, link) {
+ if (strcmp(cursor->name, parser->name) == 0) {
+ pr_err("Asymmetric key parser '%s' already registered\n",
+ parser->name);
+ ret = -EEXIST;
+ goto out;
+ }
+ }
+
+ list_add_tail(&parser->link, &asymmetric_key_parsers);
+
+ pr_notice("Asymmetric key parser '%s' registered\n", parser->name);
+ ret = 0;
+
+out:
+ up_write(&asymmetric_key_parsers_sem);
+ return ret;
+}
+EXPORT_SYMBOL_GPL(register_asymmetric_key_parser);
+
+/**
+ * unregister_asymmetric_key_parser - Unregister an asymmetric key blob parser
+ * @parser: The parser to unregister
+ */
+void unregister_asymmetric_key_parser(struct asymmetric_key_parser *parser)
+{
+ down_write(&asymmetric_key_parsers_sem);
+ list_del(&parser->link);
+ up_write(&asymmetric_key_parsers_sem);
+
+ pr_notice("Asymmetric key parser '%s' unregistered\n", parser->name);
+}
+EXPORT_SYMBOL_GPL(unregister_asymmetric_key_parser);
+
+/*
+ * Module stuff
+ */
+static int __init asymmetric_key_init(void)
+{
+ return register_key_type(&key_type_asymmetric);
+}
+
+static void __exit asymmetric_key_cleanup(void)
+{
+ unregister_key_type(&key_type_asymmetric);
+}
+
+module_init(asymmetric_key_init);
+module_exit(asymmetric_key_cleanup);
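
A hypothetical lookup sketch (not part of the patch) illustrating the "id:<id>" shorthand handled by asymmetric_key_match() above: once a parser has instantiated a key of type "asymmetric", other kernel code could locate it by the tail of its ID. The header name and the example ID are assumptions for illustration.

#include <linux/err.h>
#include <linux/key.h>
#include <keys/asymmetric-type.h>	/* assumed to declare key_type_asymmetric */

static struct key *example_find_cert(void)
{
	struct key *key;

	/* matches any asymmetric key whose ID ends in "b6a2e05c" */
	key = request_key(&key_type_asymmetric, "id:b6a2e05c", NULL);
	if (IS_ERR(key))
		return NULL;

	return key;
}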
diff --git a/crypto/asymmetric_keys/public_key.c b/crypto/asymmetric_keys/public_key.c
new file mode 100644
index 00000000000..97eb001960b
--- /dev/null
+++ b/crypto/asymmetric_keys/public_key.c
@@ -0,0 +1,128 @@
+/* In-software asymmetric public-key crypto subtype
+ *
+ * See Documentation/crypto/asymmetric-keys.txt
+ *
+ * Copyright (C) 2012 Red Hat, Inc. All Rights Reserved.
+ * Written by David Howells (dhowells@redhat.com)
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public Licence
+ * as published by the Free Software Foundation; either version
+ * 2 of the Licence, or (at your option) any later version.
+ */
+
+#define pr_fmt(fmt) "PKEY: "fmt
+#include <linux/module.h>
+#include <linux/export.h>
+#include <linux/kernel.h>
+#include <linux/slab.h>
+#include <linux/seq_file.h>
+#include <keys/asymmetric-subtype.h>
+#include "public_key.h"
+
+MODULE_LICENSE("GPL");
+
+const char *const pkey_algo_name[PKEY_ALGO__LAST] = {
+ [PKEY_ALGO_DSA] = "DSA",
+ [PKEY_ALGO_RSA] = "RSA",
+};
+EXPORT_SYMBOL_GPL(pkey_algo_name);
+
+const struct public_key_algorithm *pkey_algo[PKEY_ALGO__LAST] = {
+#if defined(CONFIG_PUBLIC_KEY_ALGO_RSA) || \
+ defined(CONFIG_PUBLIC_KEY_ALGO_RSA_MODULE)
+ [PKEY_ALGO_RSA] = &RSA_public_key_algorithm,
+#endif
+};
+EXPORT_SYMBOL_GPL(pkey_algo);
+
+const char *const pkey_id_type_name[PKEY_ID_TYPE__LAST] = {
+ [PKEY_ID_PGP] = "PGP",
+ [PKEY_ID_X509] = "X509",
+};
+EXPORT_SYMBOL_GPL(pkey_id_type_name);
+
+/*
+ * Provide a part of a description of the key for /proc/keys.
+ */
+static void public_key_describe(const struct key *asymmetric_key,
+ struct seq_file *m)
+{
+ struct public_key *key = asymmetric_key->payload.data;
+
+ if (key)
+ seq_printf(m, "%s.%s",
+ pkey_id_type_name[key->id_type], key->algo->name);
+}
+
+/*
+ * Destroy a public key algorithm key.
+ */
+void public_key_destroy(void *payload)
+{
+ struct public_key *key = payload;
+ int i;
+
+ if (key) {
+ for (i = 0; i < ARRAY_SIZE(key->mpi); i++)
+ mpi_free(key->mpi[i]);
+ kfree(key);
+ }
+}
+EXPORT_SYMBOL_GPL(public_key_destroy);
+
+/*
+ * Verify a signature using a public key.
+ */
+int public_key_verify_signature(const struct public_key *pk,
+ const struct public_key_signature *sig)
+{
+ const struct public_key_algorithm *algo;
+
+ BUG_ON(!pk);
+ BUG_ON(!pk->mpi[0]);
+ BUG_ON(!pk->mpi[1]);
+ BUG_ON(!sig);
+ BUG_ON(!sig->digest);
+ BUG_ON(!sig->mpi[0]);
+
+ algo = pk->algo;
+ if (!algo) {
+ if (pk->pkey_algo >= PKEY_ALGO__LAST)
+ return -ENOPKG;
+ algo = pkey_algo[pk->pkey_algo];
+ if (!algo)
+ return -ENOPKG;
+ }
+
+ if (!algo->verify_signature)
+ return -ENOTSUPP;
+
+ if (sig->nr_mpi != algo->n_sig_mpi) {
+ pr_debug("Signature has %u MPI not %u\n",
+ sig->nr_mpi, algo->n_sig_mpi);
+ return -EINVAL;
+ }
+
+ return algo->verify_signature(pk, sig);
+}
+EXPORT_SYMBOL_GPL(public_key_verify_signature);
+
+static int public_key_verify_signature_2(const struct key *key,
+ const struct public_key_signature *sig)
+{
+ const struct public_key *pk = key->payload.data;
+ return public_key_verify_signature(pk, sig);
+}
+
+/*
+ * Public key algorithm asymmetric key subtype
+ */
+struct asymmetric_key_subtype public_key_subtype = {
+ .owner = THIS_MODULE,
+ .name = "public_key",
+ .describe = public_key_describe,
+ .destroy = public_key_destroy,
+ .verify_signature = public_key_verify_signature_2,
+};
+EXPORT_SYMBOL_GPL(public_key_subtype);
diff --git a/crypto/asymmetric_keys/public_key.h b/crypto/asymmetric_keys/public_key.h
new file mode 100644
index 00000000000..5c37a22a063
--- /dev/null
+++ b/crypto/asymmetric_keys/public_key.h
@@ -0,0 +1,36 @@
+/* Public key algorithm internals
+ *
+ * See Documentation/crypto/asymmetric-keys.txt
+ *
+ * Copyright (C) 2012 Red Hat, Inc. All Rights Reserved.
+ * Written by David Howells (dhowells@redhat.com)
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public Licence
+ * as published by the Free Software Foundation; either version
+ * 2 of the Licence, or (at your option) any later version.
+ */
+
+#include <crypto/public_key.h>
+
+extern struct asymmetric_key_subtype public_key_subtype;
+
+/*
+ * Public key algorithm definition.
+ */
+struct public_key_algorithm {
+ const char *name;
+ u8 n_pub_mpi; /* Number of MPIs in public key */
+ u8 n_sec_mpi; /* Number of MPIs in secret key */
+ u8 n_sig_mpi; /* Number of MPIs in a signature */
+ int (*verify_signature)(const struct public_key *key,
+ const struct public_key_signature *sig);
+};
+
+extern const struct public_key_algorithm RSA_public_key_algorithm;
+
+/*
+ * public_key.c
+ */
+extern int public_key_verify_signature(const struct public_key *pk,
+ const struct public_key_signature *sig);
diff --git a/crypto/asymmetric_keys/rsa.c b/crypto/asymmetric_keys/rsa.c
new file mode 100644
index 00000000000..459cf97a75e
--- /dev/null
+++ b/crypto/asymmetric_keys/rsa.c
@@ -0,0 +1,278 @@
+/* RSA asymmetric public-key algorithm [RFC3447]
+ *
+ * Copyright (C) 2012 Red Hat, Inc. All Rights Reserved.
+ * Written by David Howells (dhowells@redhat.com)
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public Licence
+ * as published by the Free Software Foundation; either version
+ * 2 of the Licence, or (at your option) any later version.
+ */
+
+#define pr_fmt(fmt) "RSA: "fmt
+#include <linux/module.h>
+#include <linux/kernel.h>
+#include <linux/slab.h>
+#include <crypto/algapi.h>
+#include "public_key.h"
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("RSA Public Key Algorithm");
+
+#define kenter(FMT, ...) \
+ pr_devel("==> %s("FMT")\n", __func__, ##__VA_ARGS__)
+#define kleave(FMT, ...) \
+ pr_devel("<== %s()"FMT"\n", __func__, ##__VA_ARGS__)
+
+/*
+ * Hash algorithm OIDs plus ASN.1 DER wrappings [RFC4880 sec 5.2.2].
+ */
+static const u8 RSA_digest_info_MD5[] = {
+ 0x30, 0x20, 0x30, 0x0C, 0x06, 0x08,
+ 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x02, 0x05, /* OID */
+ 0x05, 0x00, 0x04, 0x10
+};
+
+static const u8 RSA_digest_info_SHA1[] = {
+ 0x30, 0x21, 0x30, 0x09, 0x06, 0x05,
+ 0x2B, 0x0E, 0x03, 0x02, 0x1A,
+ 0x05, 0x00, 0x04, 0x14
+};
+
+static const u8 RSA_digest_info_RIPE_MD_160[] = {
+ 0x30, 0x21, 0x30, 0x09, 0x06, 0x05,
+ 0x2B, 0x24, 0x03, 0x02, 0x01,
+ 0x05, 0x00, 0x04, 0x14
+};
+
+static const u8 RSA_digest_info_SHA224[] = {
+ 0x30, 0x2d, 0x30, 0x0d, 0x06, 0x09,
+ 0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x04,
+ 0x05, 0x00, 0x04, 0x1C
+};
+
+static const u8 RSA_digest_info_SHA256[] = {
+ 0x30, 0x31, 0x30, 0x0d, 0x06, 0x09,
+ 0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01,
+ 0x05, 0x00, 0x04, 0x20
+};
+
+static const u8 RSA_digest_info_SHA384[] = {
+ 0x30, 0x41, 0x30, 0x0d, 0x06, 0x09,
+ 0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x02,
+ 0x05, 0x00, 0x04, 0x30
+};
+
+static const u8 RSA_digest_info_SHA512[] = {
+ 0x30, 0x51, 0x30, 0x0d, 0x06, 0x09,
+ 0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03,
+ 0x05, 0x00, 0x04, 0x40
+};
+
+static const struct {
+ const u8 *data;
+ size_t size;
+} RSA_ASN1_templates[PKEY_HASH__LAST] = {
+#define _(X) { RSA_digest_info_##X, sizeof(RSA_digest_info_##X) }
+ [HASH_ALGO_MD5] = _(MD5),
+ [HASH_ALGO_SHA1] = _(SHA1),
+ [HASH_ALGO_RIPE_MD_160] = _(RIPE_MD_160),
+ [HASH_ALGO_SHA256] = _(SHA256),
+ [HASH_ALGO_SHA384] = _(SHA384),
+ [HASH_ALGO_SHA512] = _(SHA512),
+ [HASH_ALGO_SHA224] = _(SHA224),
+#undef _
+};
+
+/*
+ * RSAVP1() function [RFC3447 sec 5.2.2]
+ */
+static int RSAVP1(const struct public_key *key, MPI s, MPI *_m)
+{
+ MPI m;
+ int ret;
+
+ /* (1) Validate 0 <= s < n */
+ if (mpi_cmp_ui(s, 0) < 0) {
+ kleave(" = -EBADMSG [s < 0]");
+ return -EBADMSG;
+ }
+ if (mpi_cmp(s, key->rsa.n) >= 0) {
+ kleave(" = -EBADMSG [s >= n]");
+ return -EBADMSG;
+ }
+
+ m = mpi_alloc(0);
+ if (!m)
+ return -ENOMEM;
+
+ /* (2) m = s^e mod n */
+ ret = mpi_powm(m, s, key->rsa.e, key->rsa.n);
+ if (ret < 0) {
+ mpi_free(m);
+ return ret;
+ }
+
+ *_m = m;
+ return 0;
+}
+
+/*
+ * Integer to Octet String conversion [RFC3447 sec 4.1]
+ */
+static int RSA_I2OSP(MPI x, size_t xLen, u8 **_X)
+{
+ unsigned X_size, x_size;
+ int X_sign;
+ u8 *X;
+
+ /* Make sure the string is the right length. The number should begin
+ * with { 0x00, 0x01, ... } so we have to account for 15 leading zero
+ * bits not being reported by MPI.
+ */
+ x_size = mpi_get_nbits(x);
+ pr_devel("size(x)=%u xLen*8=%zu\n", x_size, xLen * 8);
+ if (x_size != xLen * 8 - 15)
+ return -ERANGE;
+
+ X = mpi_get_buffer(x, &X_size, &X_sign);
+ if (!X)
+ return -ENOMEM;
+ if (X_sign < 0) {
+ kfree(X);
+ return -EBADMSG;
+ }
+ if (X_size != xLen - 1) {
+ kfree(X);
+ return -EBADMSG;
+ }
+
+ *_X = X;
+ return 0;
+}
+
+/*
+ * Perform the RSA signature verification.
+ * @H: Value of hash of data and metadata
+ * @EM: The computed signature value
+ * @k: The size of EM (EM[0] is an invalid location but should hold 0x00)
+ * @hash_size: The size of H
+ * @asn1_template: The DigestInfo ASN.1 template
+ * @asn1_size: Size of asn1_template[]
+ */
+static int RSA_verify(const u8 *H, const u8 *EM, size_t k, size_t hash_size,
+ const u8 *asn1_template, size_t asn1_size)
+{
+ unsigned PS_end, T_offset, i;
+
+ kenter(",,%zu,%zu,%zu", k, hash_size, asn1_size);
+
+ if (k < 2 + 1 + asn1_size + hash_size)
+ return -EBADMSG;
+
+ /* Decode the EMSA-PKCS1-v1_5 */
+ if (EM[1] != 0x01) {
+ kleave(" = -EBADMSG [EM[1] == %02u]", EM[1]);
+ return -EBADMSG;
+ }
+
+ T_offset = k - (asn1_size + hash_size);
+ PS_end = T_offset - 1;
+ if (EM[PS_end] != 0x00) {
+ kleave(" = -EBADMSG [EM[T-1] == %02u]", EM[PS_end]);
+ return -EBADMSG;
+ }
+
+ for (i = 2; i < PS_end; i++) {
+ if (EM[i] != 0xff) {
+ kleave(" = -EBADMSG [EM[PS%x] == %02u]", i - 2, EM[i]);
+ return -EBADMSG;
+ }
+ }
+
+ if (crypto_memneq(asn1_template, EM + T_offset, asn1_size) != 0) {
+ kleave(" = -EBADMSG [EM[T] ASN.1 mismatch]");
+ return -EBADMSG;
+ }
+
+ if (crypto_memneq(H, EM + T_offset + asn1_size, hash_size) != 0) {
+ kleave(" = -EKEYREJECTED [EM[T] hash mismatch]");
+ return -EKEYREJECTED;
+ }
+
+ kleave(" = 0");
+ return 0;
+}
+
+/*
+ * Perform the verification step [RFC3447 sec 8.2.2].
+ */
+static int RSA_verify_signature(const struct public_key *key,
+ const struct public_key_signature *sig)
+{
+ size_t tsize;
+ int ret;
+
+ /* Variables as per RFC3447 sec 8.2.2 */
+ const u8 *H = sig->digest;
+ u8 *EM = NULL;
+ MPI m = NULL;
+ size_t k;
+
+ kenter("");
+
+ if (!RSA_ASN1_templates[sig->pkey_hash_algo].data)
+ return -ENOTSUPP;
+
+ /* (1) Check the signature size against the public key modulus size */
+ k = mpi_get_nbits(key->rsa.n);
+ tsize = mpi_get_nbits(sig->rsa.s);
+
+ /* According to RFC 4880 sec 3.2, length of MPI is computed starting
+ * from most significant bit. So the RFC 3447 sec 8.2.2 size check
+ * must be relaxed to conform with shorter signatures - so we fail here
+ * only if signature length is longer than modulus size.
+ */
+ pr_devel("step 1: k=%zu size(S)=%zu\n", k, tsize);
+ if (k < tsize) {
+ ret = -EBADMSG;
+ goto error;
+ }
+
+ /* Round up and convert to octets */
+ k = (k + 7) / 8;
+
+ /* (2b) Apply the RSAVP1 verification primitive to the public key */
+ ret = RSAVP1(key, sig->rsa.s, &m);
+ if (ret < 0)
+ goto error;
+
+ /* (2c) Convert the message representative (m) to an encoded message
+ * (EM) of length k octets.
+ *
+ * NOTE! The leading zero byte is suppressed by MPI, so we pass a
+ * pointer to the _preceding_ byte to RSA_verify()!
+ */
+ ret = RSA_I2OSP(m, k, &EM);
+ if (ret < 0)
+ goto error;
+
+ ret = RSA_verify(H, EM - 1, k, sig->digest_size,
+ RSA_ASN1_templates[sig->pkey_hash_algo].data,
+ RSA_ASN1_templates[sig->pkey_hash_algo].size);
+
+error:
+ kfree(EM);
+ mpi_free(m);
+ kleave(" = %d", ret);
+ return ret;
+}
+
+const struct public_key_algorithm RSA_public_key_algorithm = {
+ .name = "RSA",
+ .n_pub_mpi = 2,
+ .n_sec_mpi = 3,
+ .n_sig_mpi = 1,
+ .verify_signature = RSA_verify_signature,
+};
+EXPORT_SYMBOL_GPL(RSA_public_key_algorithm);
diff --git a/crypto/asymmetric_keys/signature.c b/crypto/asymmetric_keys/signature.c
new file mode 100644
index 00000000000..50b3f880b4f
--- /dev/null
+++ b/crypto/asymmetric_keys/signature.c
@@ -0,0 +1,49 @@
+/* Signature verification with an asymmetric key
+ *
+ * See Documentation/security/asymmetric-keys.txt
+ *
+ * Copyright (C) 2012 Red Hat, Inc. All Rights Reserved.
+ * Written by David Howells (dhowells@redhat.com)
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public Licence
+ * as published by the Free Software Foundation; either version
+ * 2 of the Licence, or (at your option) any later version.
+ */
+
+#include <keys/asymmetric-subtype.h>
+#include <linux/module.h>
+#include <linux/err.h>
+#include <crypto/public_key.h>
+#include "asymmetric_keys.h"
+
+/**
+ * verify_signature - Initiate the use of an asymmetric key to verify a signature
+ * @key: The asymmetric key to verify against
+ * @sig: The signature to check
+ *
+ * Returns 0 if successful or else an error.
+ */
+int verify_signature(const struct key *key,
+ const struct public_key_signature *sig)
+{
+ const struct asymmetric_key_subtype *subtype;
+ int ret;
+
+ pr_devel("==>%s()\n", __func__);
+
+ if (key->type != &key_type_asymmetric)
+ return -EINVAL;
+ subtype = asymmetric_key_subtype(key);
+ if (!subtype ||
+ !key->payload.data)
+ return -EINVAL;
+ if (!subtype->verify_signature)
+ return -ENOTSUPP;
+
+ ret = subtype->verify_signature(key, sig);
+
+ pr_devel("<==%s() = %d\n", __func__, ret);
+ return ret;
+}
+EXPORT_SYMBOL_GPL(verify_signature);
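
A hedged caller sketch for the function above. The key is assumed to already be an instantiated "asymmetric" key held by the caller; the signature field names (digest, digest_size, nr_mpi, pkey_hash_algo, rsa.s) follow the public_key_signature layout used elsewhere in this series, and the SHA-256 constant and header list are indicative rather than authoritative:

#include <linux/err.h>
#include <linux/key.h>
#include <linux/mpi.h>
#include <crypto/public_key.h>

static int example_check_sig(struct key *key,
			     const void *raw_sig, size_t raw_sig_len,
			     const u8 *digest, unsigned int digest_size)
{
	struct public_key_signature sig = {
		.pkey_hash_algo	= HASH_ALGO_SHA256,	/* assumed constant */
		.digest		= (u8 *)digest,
		.digest_size	= digest_size,
		.nr_mpi		= 1,
	};
	int ret;

	sig.rsa.s = mpi_read_raw_data(raw_sig, raw_sig_len);
	if (!sig.rsa.s)
		return -ENOMEM;

	ret = verify_signature(key, &sig);	/* dispatches to the subtype */
	mpi_free(sig.rsa.s);
	return ret;
}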
diff --git a/crypto/asymmetric_keys/x509.asn1 b/crypto/asymmetric_keys/x509.asn1
new file mode 100644
index 00000000000..bf32b3dff08
--- /dev/null
+++ b/crypto/asymmetric_keys/x509.asn1
@@ -0,0 +1,60 @@
+Certificate ::= SEQUENCE {
+ tbsCertificate TBSCertificate ({ x509_note_tbs_certificate }),
+ signatureAlgorithm AlgorithmIdentifier,
+ signature BIT STRING ({ x509_note_signature })
+ }
+
+TBSCertificate ::= SEQUENCE {
+ version [ 0 ] Version DEFAULT,
+ serialNumber CertificateSerialNumber,
+ signature AlgorithmIdentifier ({ x509_note_pkey_algo }),
+ issuer Name ({ x509_note_issuer }),
+ validity Validity,
+ subject Name ({ x509_note_subject }),
+ subjectPublicKeyInfo SubjectPublicKeyInfo,
+ issuerUniqueID [ 1 ] IMPLICIT UniqueIdentifier OPTIONAL,
+ subjectUniqueID [ 2 ] IMPLICIT UniqueIdentifier OPTIONAL,
+ extensions [ 3 ] Extensions OPTIONAL
+ }
+
+Version ::= INTEGER
+CertificateSerialNumber ::= INTEGER
+
+AlgorithmIdentifier ::= SEQUENCE {
+ algorithm OBJECT IDENTIFIER ({ x509_note_OID }),
+ parameters ANY OPTIONAL
+}
+
+Name ::= SEQUENCE OF RelativeDistinguishedName
+
+RelativeDistinguishedName ::= SET OF AttributeValueAssertion
+
+AttributeValueAssertion ::= SEQUENCE {
+ attributeType OBJECT IDENTIFIER ({ x509_note_OID }),
+ attributeValue ANY ({ x509_extract_name_segment })
+ }
+
+Validity ::= SEQUENCE {
+ notBefore Time ({ x509_note_not_before }),
+ notAfter Time ({ x509_note_not_after })
+ }
+
+Time ::= CHOICE {
+ utcTime UTCTime,
+ generalTime GeneralizedTime
+ }
+
+SubjectPublicKeyInfo ::= SEQUENCE {
+ algorithm AlgorithmIdentifier,
+ subjectPublicKey BIT STRING ({ x509_extract_key_data })
+ }
+
+UniqueIdentifier ::= BIT STRING
+
+Extensions ::= SEQUENCE OF Extension
+
+Extension ::= SEQUENCE {
+ extnid OBJECT IDENTIFIER ({ x509_note_OID }),
+ critical BOOLEAN DEFAULT,
+ extnValue OCTET STRING ({ x509_process_extension })
+ }
diff --git a/crypto/asymmetric_keys/x509_cert_parser.c b/crypto/asymmetric_keys/x509_cert_parser.c
new file mode 100644
index 00000000000..29893162497
--- /dev/null
+++ b/crypto/asymmetric_keys/x509_cert_parser.c
@@ -0,0 +1,538 @@
+/* X.509 certificate parser
+ *
+ * Copyright (C) 2012 Red Hat, Inc. All Rights Reserved.
+ * Written by David Howells (dhowells@redhat.com)
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public Licence
+ * as published by the Free Software Foundation; either version
+ * 2 of the Licence, or (at your option) any later version.
+ */
+
+#define pr_fmt(fmt) "X.509: "fmt
+#include <linux/kernel.h>
+#include <linux/slab.h>
+#include <linux/err.h>
+#include <linux/oid_registry.h>
+#include "public_key.h"
+#include "x509_parser.h"
+#include "x509-asn1.h"
+#include "x509_rsakey-asn1.h"
+
+struct x509_parse_context {
+ struct x509_certificate *cert; /* Certificate being constructed */
+ unsigned long data; /* Start of data */
+ const void *cert_start; /* Start of cert content */
+ const void *key; /* Key data */
+ size_t key_size; /* Size of key data */
+ enum OID last_oid; /* Last OID encountered */
+ enum OID algo_oid; /* Algorithm OID */
+ unsigned char nr_mpi; /* Number of MPIs stored */
+ u8 o_size; /* Size of organizationName (O) */
+ u8 cn_size; /* Size of commonName (CN) */
+ u8 email_size; /* Size of emailAddress */
+ u16 o_offset; /* Offset of organizationName (O) */
+ u16 cn_offset; /* Offset of commonName (CN) */
+ u16 email_offset; /* Offset of emailAddress */
+};
+
+/*
+ * Free an X.509 certificate
+ */
+void x509_free_certificate(struct x509_certificate *cert)
+{
+ if (cert) {
+ public_key_destroy(cert->pub);
+ kfree(cert->issuer);
+ kfree(cert->subject);
+ kfree(cert->fingerprint);
+ kfree(cert->authority);
+ kfree(cert->sig.digest);
+ mpi_free(cert->sig.rsa.s);
+ kfree(cert);
+ }
+}
+
+/*
+ * Parse an X.509 certificate
+ */
+struct x509_certificate *x509_cert_parse(const void *data, size_t datalen)
+{
+ struct x509_certificate *cert;
+ struct x509_parse_context *ctx;
+ long ret;
+
+ ret = -ENOMEM;
+ cert = kzalloc(sizeof(struct x509_certificate), GFP_KERNEL);
+ if (!cert)
+ goto error_no_cert;
+ cert->pub = kzalloc(sizeof(struct public_key), GFP_KERNEL);
+ if (!cert->pub)
+ goto error_no_ctx;
+ ctx = kzalloc(sizeof(struct x509_parse_context), GFP_KERNEL);
+ if (!ctx)
+ goto error_no_ctx;
+
+ ctx->cert = cert;
+ ctx->data = (unsigned long)data;
+
+ /* Attempt to decode the certificate */
+ ret = asn1_ber_decoder(&x509_decoder, ctx, data, datalen);
+ if (ret < 0)
+ goto error_decode;
+
+ /* Decode the public key */
+ ret = asn1_ber_decoder(&x509_rsakey_decoder, ctx,
+ ctx->key, ctx->key_size);
+ if (ret < 0)
+ goto error_decode;
+
+ kfree(ctx);
+ return cert;
+
+error_decode:
+ kfree(ctx);
+error_no_ctx:
+ x509_free_certificate(cert);
+error_no_cert:
+ return ERR_PTR(ret);
+}
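
A minimal usage sketch for the parser entry point above; the DER blob and its length are assumed to be supplied by the caller, and the subject dump is only illustrative:

static int example_parse(const void *der, size_t der_len)
{
	struct x509_certificate *cert;

	cert = x509_cert_parse(der, der_len);
	if (IS_ERR(cert))
		return PTR_ERR(cert);

	pr_info("subject: %s\n", cert->subject);
	x509_free_certificate(cert);
	return 0;
}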
+
+/*
+ * Note an OID when we find one for later processing when we know how
+ * to interpret it.
+ */
+int x509_note_OID(void *context, size_t hdrlen,
+ unsigned char tag,
+ const void *value, size_t vlen)
+{
+ struct x509_parse_context *ctx = context;
+
+ ctx->last_oid = look_up_OID(value, vlen);
+ if (ctx->last_oid == OID__NR) {
+ char buffer[50];
+ sprint_oid(value, vlen, buffer, sizeof(buffer));
+ pr_debug("Unknown OID: [%lu] %s\n",
+ (unsigned long)value - ctx->data, buffer);
+ }
+ return 0;
+}
+
+/*
+ * Save the position of the TBS data so that we can check the signature over it
+ * later.
+ */
+int x509_note_tbs_certificate(void *context, size_t hdrlen,
+ unsigned char tag,
+ const void *value, size_t vlen)
+{
+ struct x509_parse_context *ctx = context;
+
+ pr_debug("x509_note_tbs_certificate(,%zu,%02x,%ld,%zu)!\n",
+ hdrlen, tag, (unsigned long)value - ctx->data, vlen);
+
+ ctx->cert->tbs = value - hdrlen;
+ ctx->cert->tbs_size = vlen + hdrlen;
+ return 0;
+}
+
+/*
+ * Record the public key algorithm
+ */
+int x509_note_pkey_algo(void *context, size_t hdrlen,
+ unsigned char tag,
+ const void *value, size_t vlen)
+{
+ struct x509_parse_context *ctx = context;
+
+ pr_debug("PubKey Algo: %u\n", ctx->last_oid);
+
+ switch (ctx->last_oid) {
+ case OID_md2WithRSAEncryption:
+ case OID_md3WithRSAEncryption:
+ default:
+ return -ENOPKG; /* Unsupported combination */
+
+ case OID_md4WithRSAEncryption:
+ ctx->cert->sig.pkey_hash_algo = HASH_ALGO_MD5;
+ ctx->cert->sig.pkey_algo = PKEY_ALGO_RSA;
+ break;
+
+ case OID_sha1WithRSAEncryption:
+ ctx->cert->sig.pkey_hash_algo = HASH_ALGO_SHA1;
+ ctx->cert->sig.pkey_algo = PKEY_ALGO_RSA;
+ break;
+
+ case OID_sha256WithRSAEncryption:
+ ctx->cert->sig.pkey_hash_algo = HASH_ALGO_SHA256;
+ ctx->cert->sig.pkey_algo = PKEY_ALGO_RSA;
+ break;
+
+ case OID_sha384WithRSAEncryption:
+ ctx->cert->sig.pkey_hash_algo = HASH_ALGO_SHA384;
+ ctx->cert->sig.pkey_algo = PKEY_ALGO_RSA;
+ break;
+
+ case OID_sha512WithRSAEncryption:
+ ctx->cert->sig.pkey_hash_algo = HASH_ALGO_SHA512;
+ ctx->cert->sig.pkey_algo = PKEY_ALGO_RSA;
+ break;
+
+ case OID_sha224WithRSAEncryption:
+ ctx->cert->sig.pkey_hash_algo = HASH_ALGO_SHA224;
+ ctx->cert->sig.pkey_algo = PKEY_ALGO_RSA;
+ break;
+ }
+
+ ctx->algo_oid = ctx->last_oid;
+ return 0;
+}
+
+/*
+ * Note the whereabouts and type of the signature.
+ */
+int x509_note_signature(void *context, size_t hdrlen,
+ unsigned char tag,
+ const void *value, size_t vlen)
+{
+ struct x509_parse_context *ctx = context;
+
+ pr_debug("Signature type: %u size %zu\n", ctx->last_oid, vlen);
+
+ if (ctx->last_oid != ctx->algo_oid) {
+ pr_warn("Got cert with pkey (%u) and sig (%u) algorithm OIDs\n",
+ ctx->algo_oid, ctx->last_oid);
+ return -EINVAL;
+ }
+
+ ctx->cert->raw_sig = value;
+ ctx->cert->raw_sig_size = vlen;
+ return 0;
+}
+
+/*
+ * Note some of the name segments from which we'll fabricate a name.
+ */
+int x509_extract_name_segment(void *context, size_t hdrlen,
+ unsigned char tag,
+ const void *value, size_t vlen)
+{
+ struct x509_parse_context *ctx = context;
+
+ switch (ctx->last_oid) {
+ case OID_commonName:
+ ctx->cn_size = vlen;
+ ctx->cn_offset = (unsigned long)value - ctx->data;
+ break;
+ case OID_organizationName:
+ ctx->o_size = vlen;
+ ctx->o_offset = (unsigned long)value - ctx->data;
+ break;
+ case OID_email_address:
+ ctx->email_size = vlen;
+ ctx->email_offset = (unsigned long)value - ctx->data;
+ break;
+ default:
+ break;
+ }
+
+ return 0;
+}
+
+/*
+ * Fabricate and save the issuer and subject names
+ */
+static int x509_fabricate_name(struct x509_parse_context *ctx, size_t hdrlen,
+ unsigned char tag,
+ char **_name, size_t vlen)
+{
+ const void *name, *data = (const void *)ctx->data;
+ size_t namesize;
+ char *buffer;
+
+ if (*_name)
+ return -EINVAL;
+
+ /* Empty name string if no material */
+ if (!ctx->cn_size && !ctx->o_size && !ctx->email_size) {
+ buffer = kmalloc(1, GFP_KERNEL);
+ if (!buffer)
+ return -ENOMEM;
+ buffer[0] = 0;
+ goto done;
+ }
+
+ if (ctx->cn_size && ctx->o_size) {
+ /* Consider combining O and CN, but use only the CN if it is
+ * prefixed by the O, or a significant portion thereof.
+ */
+ namesize = ctx->cn_size;
+ name = data + ctx->cn_offset;
+ if (ctx->cn_size >= ctx->o_size &&
+ memcmp(data + ctx->cn_offset, data + ctx->o_offset,
+ ctx->o_size) == 0)
+ goto single_component;
+ if (ctx->cn_size >= 7 &&
+ ctx->o_size >= 7 &&
+ memcmp(data + ctx->cn_offset, data + ctx->o_offset, 7) == 0)
+ goto single_component;
+
+ buffer = kmalloc(ctx->o_size + 2 + ctx->cn_size + 1,
+ GFP_KERNEL);
+ if (!buffer)
+ return -ENOMEM;
+
+ memcpy(buffer,
+ data + ctx->o_offset, ctx->o_size);
+ buffer[ctx->o_size + 0] = ':';
+ buffer[ctx->o_size + 1] = ' ';
+ memcpy(buffer + ctx->o_size + 2,
+ data + ctx->cn_offset, ctx->cn_size);
+ buffer[ctx->o_size + 2 + ctx->cn_size] = 0;
+ goto done;
+
+ } else if (ctx->cn_size) {
+ namesize = ctx->cn_size;
+ name = data + ctx->cn_offset;
+ } else if (ctx->o_size) {
+ namesize = ctx->o_size;
+ name = data + ctx->o_offset;
+ } else {
+ namesize = ctx->email_size;
+ name = data + ctx->email_offset;
+ }
+
+single_component:
+ buffer = kmalloc(namesize + 1, GFP_KERNEL);
+ if (!buffer)
+ return -ENOMEM;
+ memcpy(buffer, name, namesize);
+ buffer[namesize] = 0;
+
+done:
+ *_name = buffer;
+ ctx->cn_size = 0;
+ ctx->o_size = 0;
+ ctx->email_size = 0;
+ return 0;
+}
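
A quick userspace restatement of the naming rule above, showing both outcomes (the strings are made up): the CN stands alone when it already repeats the O, or its first seven bytes, otherwise the two are joined as "O: CN".

#include <stdio.h>
#include <string.h>

static void fabricate(const char *o, const char *cn, char *out, size_t outlen)
{
	size_t o_len = strlen(o), cn_len = strlen(cn);

	if ((cn_len >= o_len && !memcmp(cn, o, o_len)) ||
	    (cn_len >= 7 && o_len >= 7 && !memcmp(cn, o, 7)))
		snprintf(out, outlen, "%s", cn);	/* CN only */
	else
		snprintf(out, outlen, "%s: %s", o, cn);	/* combined */
}

int main(void)
{
	char buf[128];

	fabricate("Acme Ltd", "Acme Ltd kernel signing key", buf, sizeof(buf));
	printf("%s\n", buf);	/* CN already repeats O -> CN alone */

	fabricate("Acme Ltd", "Build server", buf, sizeof(buf));
	printf("%s\n", buf);	/* -> "Acme Ltd: Build server" */

	return 0;
}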
+
+int x509_note_issuer(void *context, size_t hdrlen,
+ unsigned char tag,
+ const void *value, size_t vlen)
+{
+ struct x509_parse_context *ctx = context;
+ return x509_fabricate_name(ctx, hdrlen, tag, &ctx->cert->issuer, vlen);
+}
+
+int x509_note_subject(void *context, size_t hdrlen,
+ unsigned char tag,
+ const void *value, size_t vlen)
+{
+ struct x509_parse_context *ctx = context;
+ return x509_fabricate_name(ctx, hdrlen, tag, &ctx->cert->subject, vlen);
+}
+
+/*
+ * Extract the data for the public key algorithm
+ */
+int x509_extract_key_data(void *context, size_t hdrlen,
+ unsigned char tag,
+ const void *value, size_t vlen)
+{
+ struct x509_parse_context *ctx = context;
+
+ if (ctx->last_oid != OID_rsaEncryption)
+ return -ENOPKG;
+
+ ctx->cert->pub->pkey_algo = PKEY_ALGO_RSA;
+
+ /* Discard the BIT STRING metadata */
+ ctx->key = value + 1;
+ ctx->key_size = vlen - 1;
+ return 0;
+}
+
+/*
+ * Extract an RSA public key value
+ */
+int rsa_extract_mpi(void *context, size_t hdrlen,
+ unsigned char tag,
+ const void *value, size_t vlen)
+{
+ struct x509_parse_context *ctx = context;
+ MPI mpi;
+
+ if (ctx->nr_mpi >= ARRAY_SIZE(ctx->cert->pub->mpi)) {
+ pr_err("Too many public key MPIs in certificate\n");
+ return -EBADMSG;
+ }
+
+ mpi = mpi_read_raw_data(value, vlen);
+ if (!mpi)
+ return -ENOMEM;
+
+ ctx->cert->pub->mpi[ctx->nr_mpi++] = mpi;
+ return 0;
+}
+
+/* The keyIdentifier in AuthorityKeyIdentifier SEQUENCE is tag(CONT,PRIM,0) */
+#define SEQ_TAG_KEYID (ASN1_CONT << 6)
+
+/*
+ * Process certificate extensions that are used to qualify the certificate.
+ */
+int x509_process_extension(void *context, size_t hdrlen,
+ unsigned char tag,
+ const void *value, size_t vlen)
+{
+ struct x509_parse_context *ctx = context;
+ const unsigned char *v = value;
+ char *f;
+ int i;
+
+ pr_debug("Extension: %u\n", ctx->last_oid);
+
+ if (ctx->last_oid == OID_subjectKeyIdentifier) {
+ /* Get hold of the key fingerprint */
+ if (vlen < 3)
+ return -EBADMSG;
+ if (v[0] != ASN1_OTS || v[1] != vlen - 2)
+ return -EBADMSG;
+ v += 2;
+ vlen -= 2;
+
+ f = kmalloc(vlen * 2 + 1, GFP_KERNEL);
+ if (!f)
+ return -ENOMEM;
+ for (i = 0; i < vlen; i++)
+ sprintf(f + i * 2, "%02x", v[i]);
+ pr_debug("fingerprint %s\n", f);
+ ctx->cert->fingerprint = f;
+ return 0;
+ }
+
+ if (ctx->last_oid == OID_authorityKeyIdentifier) {
+ size_t key_len;
+
+ /* Get hold of the CA key fingerprint */
+ if (vlen < 5)
+ return -EBADMSG;
+
+ /* Authority Key Identifier must be a Constructed SEQUENCE */
+ if (v[0] != (ASN1_SEQ | (ASN1_CONS << 5)))
+ return -EBADMSG;
+
+ /* Authority Key Identifier is not indefinite length */
+ if (unlikely(vlen == ASN1_INDEFINITE_LENGTH))
+ return -EBADMSG;
+
+ if (vlen < ASN1_INDEFINITE_LENGTH) {
+ /* Short Form length */
+ if (v[1] != vlen - 2 ||
+ v[2] != SEQ_TAG_KEYID ||
+ v[3] > vlen - 4)
+ return -EBADMSG;
+
+ key_len = v[3];
+ v += 4;
+ } else {
+ /* Long Form length */
+ size_t seq_len = 0;
+ size_t sub = v[1] - ASN1_INDEFINITE_LENGTH;
+
+ if (sub > 2)
+ return -EBADMSG;
+
+ /* calculate the length from subsequent octets */
+ v += 2;
+ for (i = 0; i < sub; i++) {
+ seq_len <<= 8;
+ seq_len |= v[i];
+ }
+
+ if (seq_len != vlen - 2 - sub ||
+ v[sub] != SEQ_TAG_KEYID ||
+ v[sub + 1] > vlen - 4 - sub)
+ return -EBADMSG;
+
+ key_len = v[sub + 1];
+ v += (sub + 2);
+ }
+
+ f = kmalloc(key_len * 2 + 1, GFP_KERNEL);
+ if (!f)
+ return -ENOMEM;
+ for (i = 0; i < key_len; i++)
+ sprintf(f + i * 2, "%02x", v[i]);
+ pr_debug("authority %s\n", f);
+ ctx->cert->authority = f;
+ return 0;
+ }
+
+ return 0;
+}
+
+/*
+ * Record a certificate time.
+ */
+static int x509_note_time(struct tm *tm, size_t hdrlen,
+ unsigned char tag,
+ const unsigned char *value, size_t vlen)
+{
+ const unsigned char *p = value;
+
+#define dec2bin(X) ((X) - '0')
+#define DD2bin(P) ({ unsigned x = dec2bin(P[0]) * 10 + dec2bin(P[1]); P += 2; x; })
+
+ if (tag == ASN1_UNITIM) {
+ /* UTCTime: YYMMDDHHMMSSZ */
+ if (vlen != 13)
+ goto unsupported_time;
+ tm->tm_year = DD2bin(p);
+ if (tm->tm_year >= 50)
+ tm->tm_year += 1900;
+ else
+ tm->tm_year += 2000;
+ } else if (tag == ASN1_GENTIM) {
+ /* GenTime: YYYYMMDDHHMMSSZ */
+ if (vlen != 15)
+ goto unsupported_time;
+ tm->tm_year = DD2bin(p) * 100 + DD2bin(p);
+ } else {
+ goto unsupported_time;
+ }
+
+ tm->tm_year -= 1900;
+ tm->tm_mon = DD2bin(p) - 1;
+ tm->tm_mday = DD2bin(p);
+ tm->tm_hour = DD2bin(p);
+ tm->tm_min = DD2bin(p);
+ tm->tm_sec = DD2bin(p);
+
+ if (*p != 'Z')
+ goto unsupported_time;
+
+ return 0;
+
+unsupported_time:
+ pr_debug("Got unsupported time [tag %02x]: '%*.*s'\n",
+ tag, (int)vlen, (int)vlen, value);
+ return -EBADMSG;
+}
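
A quick userspace check of the UTCTime branch above (the date string is made up): "260313021400Z" should decode to 2026-03-13 02:14:00, since two-digit years below 50 map to 20xx. The sketch prints calendar values directly rather than the tm_* offsets.

#include <stdio.h>

#define dec2bin(X) ((X) - '0')
#define DD2bin(P) ({ unsigned x = dec2bin(P[0]) * 10 + dec2bin(P[1]); P += 2; x; })

int main(void)
{
	const char *p = "260313021400Z";	/* YYMMDDHHMMSSZ, 13 chars */
	unsigned year, mon, mday, hour, min, sec;

	year = DD2bin(p);
	year += (year >= 50) ? 1900 : 2000;
	mon  = DD2bin(p);
	mday = DD2bin(p);
	hour = DD2bin(p);
	min  = DD2bin(p);
	sec  = DD2bin(p);

	printf("%04u-%02u-%02u %02u:%02u:%02u (trailing '%c')\n",
	       year, mon, mday, hour, min, sec, *p);
	return 0;
}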
+
+int x509_note_not_before(void *context, size_t hdrlen,
+ unsigned char tag,
+ const void *value, size_t vlen)
+{
+ struct x509_parse_context *ctx = context;
+ return x509_note_time(&ctx->cert->valid_from, hdrlen, tag, value, vlen);
+}
+
+int x509_note_not_after(void *context, size_t hdrlen,
+ unsigned char tag,
+ const void *value, size_t vlen)
+{
+ struct x509_parse_context *ctx = context;
+ return x509_note_time(&ctx->cert->valid_to, hdrlen, tag, value, vlen);
+}
diff --git a/crypto/asymmetric_keys/x509_parser.h b/crypto/asymmetric_keys/x509_parser.h
new file mode 100644
index 00000000000..87d9cc26f63
--- /dev/null
+++ b/crypto/asymmetric_keys/x509_parser.h
@@ -0,0 +1,42 @@
+/* X.509 certificate parser internal definitions
+ *
+ * Copyright (C) 2012 Red Hat, Inc. All Rights Reserved.
+ * Written by David Howells (dhowells@redhat.com)
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public Licence
+ * as published by the Free Software Foundation; either version
+ * 2 of the Licence, or (at your option) any later version.
+ */
+
+#include <linux/time.h>
+#include <crypto/public_key.h>
+
+struct x509_certificate {
+ struct x509_certificate *next;
+ struct public_key *pub; /* Public key details */
+ char *issuer; /* Name of certificate issuer */
+ char *subject; /* Name of certificate subject */
+ char *fingerprint; /* Key fingerprint as hex */
+ char *authority; /* Authority key fingerprint as hex */
+ struct tm valid_from;
+ struct tm valid_to;
+ const void *tbs; /* Signed data */
+ unsigned tbs_size; /* Size of signed data */
+ unsigned raw_sig_size; /* Size of signature */
+ const void *raw_sig; /* Signature data */
+ struct public_key_signature sig; /* Signature parameters */
+};
+
+/*
+ * x509_cert_parser.c
+ */
+extern void x509_free_certificate(struct x509_certificate *cert);
+extern struct x509_certificate *x509_cert_parse(const void *data, size_t datalen);
+
+/*
+ * x509_public_key.c
+ */
+extern int x509_get_sig_params(struct x509_certificate *cert);
+extern int x509_check_signature(const struct public_key *pub,
+ struct x509_certificate *cert);
diff --git a/crypto/asymmetric_keys/x509_public_key.c b/crypto/asymmetric_keys/x509_public_key.c
new file mode 100644
index 00000000000..382ef0d2ff2
--- /dev/null
+++ b/crypto/asymmetric_keys/x509_public_key.c
@@ -0,0 +1,218 @@
+/* Instantiate a public key crypto key from an X.509 Certificate
+ *
+ * Copyright (C) 2012 Red Hat, Inc. All Rights Reserved.
+ * Written by David Howells (dhowells@redhat.com)
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public Licence
+ * as published by the Free Software Foundation; either version
+ * 2 of the Licence, or (at your option) any later version.
+ */
+
+#define pr_fmt(fmt) "X.509: "fmt
+#include <linux/module.h>
+#include <linux/kernel.h>
+#include <linux/slab.h>
+#include <linux/err.h>
+#include <linux/mpi.h>
+#include <linux/asn1_decoder.h>
+#include <keys/asymmetric-subtype.h>
+#include <keys/asymmetric-parser.h>
+#include <crypto/hash.h>
+#include "asymmetric_keys.h"
+#include "public_key.h"
+#include "x509_parser.h"
+
+/*
+ * Set up the signature parameters in an X.509 certificate. This involves
+ * digesting the signed data and extracting the signature.
+ */
+int x509_get_sig_params(struct x509_certificate *cert)
+{
+ struct crypto_shash *tfm;
+ struct shash_desc *desc;
+ size_t digest_size, desc_size;
+ void *digest;
+ int ret;
+
+ pr_devel("==>%s()\n", __func__);
+
+ if (cert->sig.rsa.s)
+ return 0;
+
+ cert->sig.rsa.s = mpi_read_raw_data(cert->raw_sig, cert->raw_sig_size);
+ if (!cert->sig.rsa.s)
+ return -ENOMEM;
+ cert->sig.nr_mpi = 1;
+
+ /* Allocate the hashing algorithm we're going to need and find out how
+ * big the hash operational data will be.
+ */
+ tfm = crypto_alloc_shash(hash_algo_name[cert->sig.pkey_hash_algo], 0, 0);
+ if (IS_ERR(tfm))
+ return (PTR_ERR(tfm) == -ENOENT) ? -ENOPKG : PTR_ERR(tfm);
+
+ desc_size = crypto_shash_descsize(tfm) + sizeof(*desc);
+ digest_size = crypto_shash_digestsize(tfm);
+
+ /* We allocate the hash operational data storage on the end of the
+ * digest storage space.
+ */
+ ret = -ENOMEM;
+ digest = kzalloc(digest_size + desc_size, GFP_KERNEL);
+ if (!digest)
+ goto error;
+
+ cert->sig.digest = digest;
+ cert->sig.digest_size = digest_size;
+
+ desc = digest + digest_size;
+ desc->tfm = tfm;
+ desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+
+ ret = crypto_shash_init(desc);
+ if (ret < 0)
+ goto error;
+ might_sleep();
+ ret = crypto_shash_finup(desc, cert->tbs, cert->tbs_size, digest);
+error:
+ crypto_free_shash(tfm);
+ pr_devel("<==%s() = %d\n", __func__, ret);
+ return ret;
+}
+EXPORT_SYMBOL_GPL(x509_get_sig_params);
+
+/*
+ * Check the signature on a certificate using the provided public key
+ */
+int x509_check_signature(const struct public_key *pub,
+ struct x509_certificate *cert)
+{
+ int ret;
+
+ pr_devel("==>%s()\n", __func__);
+
+ ret = x509_get_sig_params(cert);
+ if (ret < 0)
+ return ret;
+
+ ret = public_key_verify_signature(pub, &cert->sig);
+ pr_debug("Cert Verification: %d\n", ret);
+ return ret;
+}
+EXPORT_SYMBOL_GPL(x509_check_signature);
+
+/*
+ * Attempt to parse a data blob for a key as an X509 certificate.
+ */
+static int x509_key_preparse(struct key_preparsed_payload *prep)
+{
+ struct x509_certificate *cert;
+ size_t srlen, sulen;
+ char *desc = NULL;
+ int ret;
+
+ cert = x509_cert_parse(prep->data, prep->datalen);
+ if (IS_ERR(cert))
+ return PTR_ERR(cert);
+
+ pr_devel("Cert Issuer: %s\n", cert->issuer);
+ pr_devel("Cert Subject: %s\n", cert->subject);
+
+ if (cert->pub->pkey_algo >= PKEY_ALGO__LAST ||
+ cert->sig.pkey_algo >= PKEY_ALGO__LAST ||
+ cert->sig.pkey_hash_algo >= PKEY_HASH__LAST ||
+ !pkey_algo[cert->pub->pkey_algo] ||
+ !pkey_algo[cert->sig.pkey_algo] ||
+ !hash_algo_name[cert->sig.pkey_hash_algo]) {
+ ret = -ENOPKG;
+ goto error_free_cert;
+ }
+
+ pr_devel("Cert Key Algo: %s\n", pkey_algo_name[cert->pub->pkey_algo]);
+ pr_devel("Cert Valid From: %04ld-%02d-%02d %02d:%02d:%02d\n",
+ cert->valid_from.tm_year + 1900, cert->valid_from.tm_mon + 1,
+ cert->valid_from.tm_mday, cert->valid_from.tm_hour,
+ cert->valid_from.tm_min, cert->valid_from.tm_sec);
+ pr_devel("Cert Valid To: %04ld-%02d-%02d %02d:%02d:%02d\n",
+ cert->valid_to.tm_year + 1900, cert->valid_to.tm_mon + 1,
+ cert->valid_to.tm_mday, cert->valid_to.tm_hour,
+ cert->valid_to.tm_min, cert->valid_to.tm_sec);
+ pr_devel("Cert Signature: %s + %s\n",
+ pkey_algo_name[cert->sig.pkey_algo],
+ hash_algo_name[cert->sig.pkey_hash_algo]);
+
+ if (!cert->fingerprint) {
+ pr_warn("Cert for '%s' must have a SubjKeyId extension\n",
+ cert->subject);
+ ret = -EKEYREJECTED;
+ goto error_free_cert;
+ }
+
+ cert->pub->algo = pkey_algo[cert->pub->pkey_algo];
+ cert->pub->id_type = PKEY_ID_X509;
+
+ /* Check the signature on the key if it appears to be self-signed */
+ if (!cert->authority ||
+ strcmp(cert->fingerprint, cert->authority) == 0) {
+ ret = x509_check_signature(cert->pub, cert);
+ if (ret < 0)
+ goto error_free_cert;
+ }
+
+ /* Propose a description */
+ sulen = strlen(cert->subject);
+ srlen = strlen(cert->fingerprint);
+ ret = -ENOMEM;
+ desc = kmalloc(sulen + 2 + srlen + 1, GFP_KERNEL);
+ if (!desc)
+ goto error_free_cert;
+ memcpy(desc, cert->subject, sulen);
+ desc[sulen] = ':';
+ desc[sulen + 1] = ' ';
+ memcpy(desc + sulen + 2, cert->fingerprint, srlen);
+ desc[sulen + 2 + srlen] = 0;
+
+ /* We're pinning the module by being linked against it */
+ __module_get(public_key_subtype.owner);
+ prep->type_data[0] = &public_key_subtype;
+ prep->type_data[1] = cert->fingerprint;
+ prep->payload = cert->pub;
+ prep->description = desc;
+ prep->quotalen = 100;
+
+ /* We've finished with the certificate */
+ cert->pub = NULL;
+ cert->fingerprint = NULL;
+ desc = NULL;
+ ret = 0;
+
+error_free_cert:
+ x509_free_certificate(cert);
+ return ret;
+}
+
+static struct asymmetric_key_parser x509_key_parser = {
+ .owner = THIS_MODULE,
+ .name = "x509",
+ .parse = x509_key_preparse,
+};
+
+/*
+ * Module stuff
+ */
+static int __init x509_key_init(void)
+{
+ return register_asymmetric_key_parser(&x509_key_parser);
+}
+
+static void __exit x509_key_exit(void)
+{
+ unregister_asymmetric_key_parser(&x509_key_parser);
+}
+
+module_init(x509_key_init);
+module_exit(x509_key_exit);
+
+MODULE_DESCRIPTION("X.509 certificate parser");
+MODULE_LICENSE("GPL");
diff --git a/crypto/asymmetric_keys/x509_rsakey.asn1 b/crypto/asymmetric_keys/x509_rsakey.asn1
new file mode 100644
index 00000000000..4ec7cc6532c
--- /dev/null
+++ b/crypto/asymmetric_keys/x509_rsakey.asn1
@@ -0,0 +1,4 @@
+RSAPublicKey ::= SEQUENCE {
+ modulus INTEGER ({ rsa_extract_mpi }), -- n
+ publicExponent INTEGER ({ rsa_extract_mpi }) -- e
+ }
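
For illustration, a toy DER blob matching this grammar (values chosen for brevity; real keys carry a modulus hundreds of bytes long). Decoding it fires rsa_extract_mpi() once per INTEGER:

/* RSAPublicKey { n = 3233, e = 17 } -- illustrative only */
static const unsigned char toy_rsa_pubkey[] = {
	0x30, 0x07,			/* SEQUENCE, 7 content octets */
	0x02, 0x02, 0x0c, 0xa1,		/* INTEGER 0x0ca1 = 3233 (modulus n) */
	0x02, 0x01, 0x11,		/* INTEGER 0x11   = 17   (exponent e) */
};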
diff --git a/crypto/async_tx/Kconfig b/crypto/async_tx/Kconfig
index d8fb3914598..f38a58aef3e 100644
--- a/crypto/async_tx/Kconfig
+++ b/crypto/async_tx/Kconfig
@@ -10,7 +10,18 @@ config ASYNC_XOR
select ASYNC_CORE
select XOR_BLOCKS
-config ASYNC_MEMSET
+config ASYNC_PQ
tristate
select ASYNC_CORE
+config ASYNC_RAID6_RECOV
+ tristate
+ select ASYNC_CORE
+ select ASYNC_PQ
+ select ASYNC_XOR
+
+config ASYNC_TX_DISABLE_PQ_VAL_DMA
+ bool
+
+config ASYNC_TX_DISABLE_XOR_VAL_DMA
+ bool
diff --git a/crypto/async_tx/Makefile b/crypto/async_tx/Makefile
index 27baa7d52fb..462e4abbfe6 100644
--- a/crypto/async_tx/Makefile
+++ b/crypto/async_tx/Makefile
@@ -1,4 +1,6 @@
obj-$(CONFIG_ASYNC_CORE) += async_tx.o
obj-$(CONFIG_ASYNC_MEMCPY) += async_memcpy.o
-obj-$(CONFIG_ASYNC_MEMSET) += async_memset.o
obj-$(CONFIG_ASYNC_XOR) += async_xor.o
+obj-$(CONFIG_ASYNC_PQ) += async_pq.o
+obj-$(CONFIG_ASYNC_RAID6_RECOV) += async_raid6_recov.o
+obj-$(CONFIG_ASYNC_RAID6_TEST) += raid6test.o
diff --git a/crypto/async_tx/async_memcpy.c b/crypto/async_tx/async_memcpy.c
index ddccfb01c41..f8c0b8dbeb7 100644
--- a/crypto/async_tx/async_memcpy.c
+++ b/crypto/async_tx/async_memcpy.c
@@ -25,6 +25,7 @@
*/
#include <linux/kernel.h>
#include <linux/highmem.h>
+#include <linux/module.h>
#include <linux/mm.h>
#include <linux/dma-mapping.h>
#include <linux/async_tx.h>
@@ -33,76 +34,77 @@
* async_memcpy - attempt to copy memory with a dma engine.
* @dest: destination page
* @src: src page
- * @offset: offset in pages to start transaction
+ * @dest_offset: offset into 'dest' to start transaction
+ * @src_offset: offset into 'src' to start transaction
* @len: length in bytes
- * @flags: ASYNC_TX_ACK, ASYNC_TX_DEP_ACK,
- * @depend_tx: memcpy depends on the result of this transaction
- * @cb_fn: function to call when the memcpy completes
- * @cb_param: parameter to pass to the callback routine
+ * @submit: submission / completion modifiers
+ *
+ * honored flags: ASYNC_TX_ACK
*/
struct dma_async_tx_descriptor *
async_memcpy(struct page *dest, struct page *src, unsigned int dest_offset,
- unsigned int src_offset, size_t len, enum async_tx_flags flags,
- struct dma_async_tx_descriptor *depend_tx,
- dma_async_tx_callback cb_fn, void *cb_param)
+ unsigned int src_offset, size_t len,
+ struct async_submit_ctl *submit)
{
- struct dma_chan *chan = async_tx_find_channel(depend_tx, DMA_MEMCPY,
+ struct dma_chan *chan = async_tx_find_channel(submit, DMA_MEMCPY,
&dest, 1, &src, 1, len);
struct dma_device *device = chan ? chan->device : NULL;
struct dma_async_tx_descriptor *tx = NULL;
-
- if (device) {
- dma_addr_t dma_dest, dma_src;
- unsigned long dma_prep_flags = cb_fn ? DMA_PREP_INTERRUPT : 0;
-
- dma_dest = dma_map_page(device->dev, dest, dest_offset, len,
- DMA_FROM_DEVICE);
-
- dma_src = dma_map_page(device->dev, src, src_offset, len,
- DMA_TO_DEVICE);
-
- tx = device->device_prep_dma_memcpy(chan, dma_dest, dma_src,
- len, dma_prep_flags);
+ struct dmaengine_unmap_data *unmap = NULL;
+
+ if (device)
+ unmap = dmaengine_get_unmap_data(device->dev, 2, GFP_NOIO);
+
+ if (unmap && is_dma_copy_aligned(device, src_offset, dest_offset, len)) {
+ unsigned long dma_prep_flags = 0;
+
+ if (submit->cb_fn)
+ dma_prep_flags |= DMA_PREP_INTERRUPT;
+ if (submit->flags & ASYNC_TX_FENCE)
+ dma_prep_flags |= DMA_PREP_FENCE;
+
+ unmap->to_cnt = 1;
+ unmap->addr[0] = dma_map_page(device->dev, src, src_offset, len,
+ DMA_TO_DEVICE);
+ unmap->from_cnt = 1;
+ unmap->addr[1] = dma_map_page(device->dev, dest, dest_offset, len,
+ DMA_FROM_DEVICE);
+ unmap->len = len;
+
+ tx = device->device_prep_dma_memcpy(chan, unmap->addr[1],
+ unmap->addr[0], len,
+ dma_prep_flags);
}
if (tx) {
pr_debug("%s: (async) len: %zu\n", __func__, len);
- async_tx_submit(chan, tx, flags, depend_tx, cb_fn, cb_param);
+
+ dma_set_unmap(tx, unmap);
+ async_tx_submit(chan, tx, submit);
} else {
void *dest_buf, *src_buf;
pr_debug("%s: (sync) len: %zu\n", __func__, len);
/* wait for any prerequisite operations */
- async_tx_quiesce(&depend_tx);
+ async_tx_quiesce(&submit->depend_tx);
- dest_buf = kmap_atomic(dest, KM_USER0) + dest_offset;
- src_buf = kmap_atomic(src, KM_USER1) + src_offset;
+ dest_buf = kmap_atomic(dest) + dest_offset;
+ src_buf = kmap_atomic(src) + src_offset;
memcpy(dest_buf, src_buf, len);
- kunmap_atomic(dest_buf, KM_USER0);
- kunmap_atomic(src_buf, KM_USER1);
+ kunmap_atomic(src_buf);
+ kunmap_atomic(dest_buf);
- async_tx_sync_epilog(cb_fn, cb_param);
+ async_tx_sync_epilog(submit);
}
+ dmaengine_unmap_put(unmap);
+
return tx;
}
EXPORT_SYMBOL_GPL(async_memcpy);
-static int __init async_memcpy_init(void)
-{
- return 0;
-}
-
-static void __exit async_memcpy_exit(void)
-{
- do { } while (0);
-}
-
-module_init(async_memcpy_init);
-module_exit(async_memcpy_exit);
-
MODULE_AUTHOR("Intel Corporation");
MODULE_DESCRIPTION("asynchronous memcpy api");
MODULE_LICENSE("GPL");
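
A hedged caller sketch for the reworked prototype: the flags, callback, dependency and scribble arguments now travel in a struct async_submit_ctl. The page pointers are assumed to come from the caller; no callback or scribble area is used here.

#include <linux/async_tx.h>

static void example_copy(struct page *dest, struct page *src)
{
	struct async_submit_ctl submit;
	struct dma_async_tx_descriptor *tx;

	init_async_submit(&submit, ASYNC_TX_ACK, NULL, NULL, NULL, NULL);
	tx = async_memcpy(dest, src, 0, 0, PAGE_SIZE, &submit);
	async_tx_quiesce(&tx);	/* wait for the copy (async or sync path) */
}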
diff --git a/crypto/async_tx/async_memset.c b/crypto/async_tx/async_memset.c
deleted file mode 100644
index 5b5eb99bb24..00000000000
--- a/crypto/async_tx/async_memset.c
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * memory fill offload engine support
- *
- * Copyright © 2006, Intel Corporation.
- *
- * Dan Williams <dan.j.williams@intel.com>
- *
- * with architecture considerations by:
- * Neil Brown <neilb@suse.de>
- * Jeff Garzik <jeff@garzik.org>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms and conditions of the GNU General Public License,
- * version 2, as published by the Free Software Foundation.
- *
- * This program is distributed in the hope it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
- * more details.
- *
- * You should have received a copy of the GNU General Public License along with
- * this program; if not, write to the Free Software Foundation, Inc.,
- * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
- *
- */
-#include <linux/kernel.h>
-#include <linux/interrupt.h>
-#include <linux/mm.h>
-#include <linux/dma-mapping.h>
-#include <linux/async_tx.h>
-
-/**
- * async_memset - attempt to fill memory with a dma engine.
- * @dest: destination page
- * @val: fill value
- * @offset: offset in pages to start transaction
- * @len: length in bytes
- * @flags: ASYNC_TX_ACK, ASYNC_TX_DEP_ACK
- * @depend_tx: memset depends on the result of this transaction
- * @cb_fn: function to call when the memcpy completes
- * @cb_param: parameter to pass to the callback routine
- */
-struct dma_async_tx_descriptor *
-async_memset(struct page *dest, int val, unsigned int offset,
- size_t len, enum async_tx_flags flags,
- struct dma_async_tx_descriptor *depend_tx,
- dma_async_tx_callback cb_fn, void *cb_param)
-{
- struct dma_chan *chan = async_tx_find_channel(depend_tx, DMA_MEMSET,
- &dest, 1, NULL, 0, len);
- struct dma_device *device = chan ? chan->device : NULL;
- struct dma_async_tx_descriptor *tx = NULL;
-
- if (device) {
- dma_addr_t dma_dest;
- unsigned long dma_prep_flags = cb_fn ? DMA_PREP_INTERRUPT : 0;
-
- dma_dest = dma_map_page(device->dev, dest, offset, len,
- DMA_FROM_DEVICE);
-
- tx = device->device_prep_dma_memset(chan, dma_dest, val, len,
- dma_prep_flags);
- }
-
- if (tx) {
- pr_debug("%s: (async) len: %zu\n", __func__, len);
- async_tx_submit(chan, tx, flags, depend_tx, cb_fn, cb_param);
- } else { /* run the memset synchronously */
- void *dest_buf;
- pr_debug("%s: (sync) len: %zu\n", __func__, len);
-
- dest_buf = (void *) (((char *) page_address(dest)) + offset);
-
- /* wait for any prerequisite operations */
- async_tx_quiesce(&depend_tx);
-
- memset(dest_buf, val, len);
-
- async_tx_sync_epilog(cb_fn, cb_param);
- }
-
- return tx;
-}
-EXPORT_SYMBOL_GPL(async_memset);
-
-static int __init async_memset_init(void)
-{
- return 0;
-}
-
-static void __exit async_memset_exit(void)
-{
- do { } while (0);
-}
-
-module_init(async_memset_init);
-module_exit(async_memset_exit);
-
-MODULE_AUTHOR("Intel Corporation");
-MODULE_DESCRIPTION("asynchronous memset api");
-MODULE_LICENSE("GPL");
diff --git a/crypto/async_tx/async_pq.c b/crypto/async_tx/async_pq.c
new file mode 100644
index 00000000000..d05327caf69
--- /dev/null
+++ b/crypto/async_tx/async_pq.c
@@ -0,0 +1,441 @@
+/*
+ * Copyright(c) 2007 Yuri Tikhonov <yur@emcraft.com>
+ * Copyright(c) 2009 Intel Corporation
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc., 59
+ * Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+ *
+ * The full GNU General Public License is included in this distribution in the
+ * file called COPYING.
+ */
+#include <linux/kernel.h>
+#include <linux/interrupt.h>
+#include <linux/module.h>
+#include <linux/dma-mapping.h>
+#include <linux/raid/pq.h>
+#include <linux/async_tx.h>
+#include <linux/gfp.h>
+
+/**
+ * pq_scribble_page - space to hold throwaway P or Q buffer for
+ * synchronous gen_syndrome
+ */
+static struct page *pq_scribble_page;
+
+/* the struct page *blocks[] parameter passed to async_gen_syndrome()
+ * and async_syndrome_val() contains the 'P' destination address at
+ * blocks[disks-2] and the 'Q' destination address at blocks[disks-1]
+ *
+ * note: these are macros as they are used as lvalues
+ */
+#define P(b, d) (b[d-2])
+#define Q(b, d) (b[d-1])
+
+/**
+ * do_async_gen_syndrome - asynchronously calculate P and/or Q
+ */
+static __async_inline struct dma_async_tx_descriptor *
+do_async_gen_syndrome(struct dma_chan *chan,
+ const unsigned char *scfs, int disks,
+ struct dmaengine_unmap_data *unmap,
+ enum dma_ctrl_flags dma_flags,
+ struct async_submit_ctl *submit)
+{
+ struct dma_async_tx_descriptor *tx = NULL;
+ struct dma_device *dma = chan->device;
+ enum async_tx_flags flags_orig = submit->flags;
+ dma_async_tx_callback cb_fn_orig = submit->cb_fn;
+ dma_async_tx_callback cb_param_orig = submit->cb_param;
+ int src_cnt = disks - 2;
+ unsigned short pq_src_cnt;
+ dma_addr_t dma_dest[2];
+ int src_off = 0;
+
+ if (submit->flags & ASYNC_TX_FENCE)
+ dma_flags |= DMA_PREP_FENCE;
+
+ while (src_cnt > 0) {
+ submit->flags = flags_orig;
+ pq_src_cnt = min(src_cnt, dma_maxpq(dma, dma_flags));
+ /* if we are submitting additional pqs, leave the chain open,
+ * clear the callback parameters, and leave the destination
+ * buffers mapped
+ */
+ if (src_cnt > pq_src_cnt) {
+ submit->flags &= ~ASYNC_TX_ACK;
+ submit->flags |= ASYNC_TX_FENCE;
+ submit->cb_fn = NULL;
+ submit->cb_param = NULL;
+ } else {
+ submit->cb_fn = cb_fn_orig;
+ submit->cb_param = cb_param_orig;
+ if (cb_fn_orig)
+ dma_flags |= DMA_PREP_INTERRUPT;
+ }
+
+ /* Drivers force forward progress in case they can not provide
+ * a descriptor
+ */
+ for (;;) {
+ dma_dest[0] = unmap->addr[disks - 2];
+ dma_dest[1] = unmap->addr[disks - 1];
+ tx = dma->device_prep_dma_pq(chan, dma_dest,
+ &unmap->addr[src_off],
+ pq_src_cnt,
+ &scfs[src_off], unmap->len,
+ dma_flags);
+ if (likely(tx))
+ break;
+ async_tx_quiesce(&submit->depend_tx);
+ dma_async_issue_pending(chan);
+ }
+
+ dma_set_unmap(tx, unmap);
+ async_tx_submit(chan, tx, submit);
+ submit->depend_tx = tx;
+
+ /* drop completed sources */
+ src_cnt -= pq_src_cnt;
+ src_off += pq_src_cnt;
+
+ dma_flags |= DMA_PREP_CONTINUE;
+ }
+
+ return tx;
+}
+
+/**
+ * do_sync_gen_syndrome - synchronously calculate a raid6 syndrome
+ */
+static void
+do_sync_gen_syndrome(struct page **blocks, unsigned int offset, int disks,
+ size_t len, struct async_submit_ctl *submit)
+{
+ void **srcs;
+ int i;
+
+ if (submit->scribble)
+ srcs = submit->scribble;
+ else
+ srcs = (void **) blocks;
+
+ for (i = 0; i < disks; i++) {
+ if (blocks[i] == NULL) {
+ BUG_ON(i > disks - 3); /* P or Q can't be zero */
+ srcs[i] = (void*)raid6_empty_zero_page;
+ } else
+ srcs[i] = page_address(blocks[i]) + offset;
+ }
+ raid6_call.gen_syndrome(disks, len, srcs);
+ async_tx_sync_epilog(submit);
+}
+
+/**
+ * async_gen_syndrome - asynchronously calculate a raid6 syndrome
+ * @blocks: source blocks from idx 0..disks-3, P @ disks-2 and Q @ disks-1
+ * @offset: common offset into each block (src and dest) to start transaction
+ * @disks: number of blocks (including missing P or Q, see below)
+ * @len: length of operation in bytes
+ * @submit: submission/completion modifiers
+ *
+ * General note: This routine assumes a field of GF(2^8) with a
+ * primitive polynomial of 0x11d and a generator of {02}.
+ *
+ * 'disks' note: callers can optionally omit either P or Q (but not
+ * both) from the calculation by setting blocks[disks-2] or
+ * blocks[disks-1] to NULL. When P or Q is omitted 'len' must be <=
+ * PAGE_SIZE as a temporary buffer of this size is used in the
+ * synchronous path. 'disks' always accounts for both destination
+ * buffers. If any source buffers (blocks[i] where i < disks - 2) are
+ * set to NULL those buffers will be replaced with the raid6_empty_zero_page
+ * in the synchronous path and omitted in the hardware-asynchronous
+ * path.
+ */
+struct dma_async_tx_descriptor *
+async_gen_syndrome(struct page **blocks, unsigned int offset, int disks,
+ size_t len, struct async_submit_ctl *submit)
+{
+ int src_cnt = disks - 2;
+ struct dma_chan *chan = async_tx_find_channel(submit, DMA_PQ,
+ &P(blocks, disks), 2,
+ blocks, src_cnt, len);
+ struct dma_device *device = chan ? chan->device : NULL;
+ struct dmaengine_unmap_data *unmap = NULL;
+
+ BUG_ON(disks > 255 || !(P(blocks, disks) || Q(blocks, disks)));
+
+ if (device)
+ unmap = dmaengine_get_unmap_data(device->dev, disks, GFP_NOIO);
+
+ if (unmap &&
+ (src_cnt <= dma_maxpq(device, 0) ||
+ dma_maxpq(device, DMA_PREP_CONTINUE) > 0) &&
+ is_dma_pq_aligned(device, offset, 0, len)) {
+ struct dma_async_tx_descriptor *tx;
+ enum dma_ctrl_flags dma_flags = 0;
+ unsigned char coefs[src_cnt];
+ int i, j;
+
+ /* run the p+q asynchronously */
+ pr_debug("%s: (async) disks: %d len: %zu\n",
+ __func__, disks, len);
+
+ /* convert source addresses being careful to collapse 'empty'
+ * sources and update the coefficients accordingly
+ */
+ unmap->len = len;
+ for (i = 0, j = 0; i < src_cnt; i++) {
+ if (blocks[i] == NULL)
+ continue;
+ unmap->addr[j] = dma_map_page(device->dev, blocks[i], offset,
+ len, DMA_TO_DEVICE);
+ coefs[j] = raid6_gfexp[i];
+ unmap->to_cnt++;
+ j++;
+ }
+
+ /*
+ * DMAs use destinations as sources,
+ * so use BIDIRECTIONAL mapping
+ */
+ unmap->bidi_cnt++;
+ if (P(blocks, disks))
+ unmap->addr[j++] = dma_map_page(device->dev, P(blocks, disks),
+ offset, len, DMA_BIDIRECTIONAL);
+ else {
+ unmap->addr[j++] = 0;
+ dma_flags |= DMA_PREP_PQ_DISABLE_P;
+ }
+
+ unmap->bidi_cnt++;
+ if (Q(blocks, disks))
+ unmap->addr[j++] = dma_map_page(device->dev, Q(blocks, disks),
+ offset, len, DMA_BIDIRECTIONAL);
+ else {
+ unmap->addr[j++] = 0;
+ dma_flags |= DMA_PREP_PQ_DISABLE_Q;
+ }
+
+ tx = do_async_gen_syndrome(chan, coefs, j, unmap, dma_flags, submit);
+ dmaengine_unmap_put(unmap);
+ return tx;
+ }
+
+ dmaengine_unmap_put(unmap);
+
+ /* run the pq synchronously */
+ pr_debug("%s: (sync) disks: %d len: %zu\n", __func__, disks, len);
+
+ /* wait for any prerequisite operations */
+ async_tx_quiesce(&submit->depend_tx);
+
+ if (!P(blocks, disks)) {
+ P(blocks, disks) = pq_scribble_page;
+ BUG_ON(len + offset > PAGE_SIZE);
+ }
+ if (!Q(blocks, disks)) {
+ Q(blocks, disks) = pq_scribble_page;
+ BUG_ON(len + offset > PAGE_SIZE);
+ }
+ do_sync_gen_syndrome(blocks, offset, disks, len, submit);
+
+ return NULL;
+}
+EXPORT_SYMBOL_GPL(async_gen_syndrome);
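
A hedged caller sketch for a 4-data + P + Q stripe: pages[0..3] are the data pages and pages[4]/pages[5] receive P and Q. The addr_conv scribble area (at least six entries, sized and provided by the caller) is an assumption needed for the synchronous fallback path.

#include <linux/async_tx.h>

static void example_gen_pq(struct page **pages, addr_conv_t *addr_conv)
{
	struct async_submit_ctl submit;
	struct dma_async_tx_descriptor *tx;

	init_async_submit(&submit, ASYNC_TX_ACK, NULL, NULL, NULL, addr_conv);
	tx = async_gen_syndrome(pages, 0, 6, PAGE_SIZE, &submit);
	async_tx_quiesce(&tx);		/* wait for P/Q to be written */
}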
+
+static inline struct dma_chan *
+pq_val_chan(struct async_submit_ctl *submit, struct page **blocks, int disks, size_t len)
+{
+ #ifdef CONFIG_ASYNC_TX_DISABLE_PQ_VAL_DMA
+ return NULL;
+ #endif
+ return async_tx_find_channel(submit, DMA_PQ_VAL, NULL, 0, blocks,
+ disks, len);
+}
+
+/**
+ * async_syndrome_val - asynchronously validate a raid6 syndrome
+ * @blocks: source blocks from idx 0..disks-3, P @ disks-2 and Q @ disks-1
+ * @offset: common offset into each block (src and dest) to start transaction
+ * @disks: number of blocks (including missing P or Q, see below)
+ * @len: length of operation in bytes
+ * @pqres: on val failure SUM_CHECK_P_RESULT and/or SUM_CHECK_Q_RESULT are set
+ * @spare: temporary result buffer for the synchronous case
+ * @submit: submission / completion modifiers
+ *
+ * The same notes from async_gen_syndrome apply to the 'blocks',
+ * and 'disks' parameters of this routine. The synchronous path
+ * requires a temporary result buffer and submit->scribble to be
+ * specified.
+ */
+struct dma_async_tx_descriptor *
+async_syndrome_val(struct page **blocks, unsigned int offset, int disks,
+ size_t len, enum sum_check_flags *pqres, struct page *spare,
+ struct async_submit_ctl *submit)
+{
+ struct dma_chan *chan = pq_val_chan(submit, blocks, disks, len);
+ struct dma_device *device = chan ? chan->device : NULL;
+ struct dma_async_tx_descriptor *tx;
+ unsigned char coefs[disks-2];
+ enum dma_ctrl_flags dma_flags = submit->cb_fn ? DMA_PREP_INTERRUPT : 0;
+ struct dmaengine_unmap_data *unmap = NULL;
+
+ BUG_ON(disks < 4);
+
+ if (device)
+ unmap = dmaengine_get_unmap_data(device->dev, disks, GFP_NOIO);
+
+ if (unmap && disks <= dma_maxpq(device, 0) &&
+ is_dma_pq_aligned(device, offset, 0, len)) {
+ struct device *dev = device->dev;
+ dma_addr_t pq[2];
+ int i, j = 0, src_cnt = 0;
+
+ pr_debug("%s: (async) disks: %d len: %zu\n",
+ __func__, disks, len);
+
+ unmap->len = len;
+ for (i = 0; i < disks-2; i++)
+ if (likely(blocks[i])) {
+ unmap->addr[j] = dma_map_page(dev, blocks[i],
+ offset, len,
+ DMA_TO_DEVICE);
+ coefs[j] = raid6_gfexp[i];
+ unmap->to_cnt++;
+ src_cnt++;
+ j++;
+ }
+
+ if (!P(blocks, disks)) {
+ pq[0] = 0;
+ dma_flags |= DMA_PREP_PQ_DISABLE_P;
+ } else {
+ pq[0] = dma_map_page(dev, P(blocks, disks),
+ offset, len,
+ DMA_TO_DEVICE);
+ unmap->addr[j++] = pq[0];
+ unmap->to_cnt++;
+ }
+ if (!Q(blocks, disks)) {
+ pq[1] = 0;
+ dma_flags |= DMA_PREP_PQ_DISABLE_Q;
+ } else {
+ pq[1] = dma_map_page(dev, Q(blocks, disks),
+ offset, len,
+ DMA_TO_DEVICE);
+ unmap->addr[j++] = pq[1];
+ unmap->to_cnt++;
+ }
+
+ if (submit->flags & ASYNC_TX_FENCE)
+ dma_flags |= DMA_PREP_FENCE;
+ for (;;) {
+ tx = device->device_prep_dma_pq_val(chan, pq,
+ unmap->addr,
+ src_cnt,
+ coefs,
+ len, pqres,
+ dma_flags);
+ if (likely(tx))
+ break;
+ async_tx_quiesce(&submit->depend_tx);
+ dma_async_issue_pending(chan);
+ }
+
+ dma_set_unmap(tx, unmap);
+ async_tx_submit(chan, tx, submit);
+
+ return tx;
+ } else {
+ struct page *p_src = P(blocks, disks);
+ struct page *q_src = Q(blocks, disks);
+ enum async_tx_flags flags_orig = submit->flags;
+ dma_async_tx_callback cb_fn_orig = submit->cb_fn;
+ void *scribble = submit->scribble;
+ void *cb_param_orig = submit->cb_param;
+ void *p, *q, *s;
+
+ pr_debug("%s: (sync) disks: %d len: %zu\n",
+ __func__, disks, len);
+
+ /* caller must provide a temporary result buffer and
+ * allow the input parameters to be preserved
+ */
+ BUG_ON(!spare || !scribble);
+
+ /* wait for any prerequisite operations */
+ async_tx_quiesce(&submit->depend_tx);
+
+ /* recompute p and/or q into the temporary buffer and then
+ * check to see the result matches the current value
+ */
+ tx = NULL;
+ *pqres = 0;
+ if (p_src) {
+ init_async_submit(submit, ASYNC_TX_XOR_ZERO_DST, NULL,
+ NULL, NULL, scribble);
+ tx = async_xor(spare, blocks, offset, disks-2, len, submit);
+ async_tx_quiesce(&tx);
+ p = page_address(p_src) + offset;
+ s = page_address(spare) + offset;
+ *pqres |= !!memcmp(p, s, len) << SUM_CHECK_P;
+ }
+
+ if (q_src) {
+ P(blocks, disks) = NULL;
+ Q(blocks, disks) = spare;
+ init_async_submit(submit, 0, NULL, NULL, NULL, scribble);
+ tx = async_gen_syndrome(blocks, offset, disks, len, submit);
+ async_tx_quiesce(&tx);
+ q = page_address(q_src) + offset;
+ s = page_address(spare) + offset;
+ *pqres |= !!memcmp(q, s, len) << SUM_CHECK_Q;
+ }
+
+ /* restore P, Q and submit */
+ P(blocks, disks) = p_src;
+ Q(blocks, disks) = q_src;
+
+ submit->cb_fn = cb_fn_orig;
+ submit->cb_param = cb_param_orig;
+ submit->flags = flags_orig;
+ async_tx_sync_epilog(submit);
+
+ return NULL;
+ }
+}
+EXPORT_SYMBOL_GPL(async_syndrome_val);
+
+static int __init async_pq_init(void)
+{
+ pq_scribble_page = alloc_page(GFP_KERNEL);
+
+ if (pq_scribble_page)
+ return 0;
+
+ pr_err("%s: failed to allocate required spare page\n", __func__);
+
+ return -ENOMEM;
+}
+
+static void __exit async_pq_exit(void)
+{
+ put_page(pq_scribble_page);
+}
+
+module_init(async_pq_init);
+module_exit(async_pq_exit);
+
+MODULE_DESCRIPTION("asynchronous raid6 syndrome generation/validation");
+MODULE_LICENSE("GPL");
diff --git a/crypto/async_tx/async_raid6_recov.c b/crypto/async_tx/async_raid6_recov.c
new file mode 100644
index 00000000000..934a8498149
--- /dev/null
+++ b/crypto/async_tx/async_raid6_recov.c
@@ -0,0 +1,531 @@
+/*
+ * Asynchronous RAID-6 recovery calculations ASYNC_TX API.
+ * Copyright(c) 2009 Intel Corporation
+ *
+ * based on raid6recov.c:
+ * Copyright 2002 H. Peter Anvin
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc., 51
+ * Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ */
+#include <linux/kernel.h>
+#include <linux/interrupt.h>
+#include <linux/module.h>
+#include <linux/dma-mapping.h>
+#include <linux/raid/pq.h>
+#include <linux/async_tx.h>
+#include <linux/dmaengine.h>
+
+static struct dma_async_tx_descriptor *
+async_sum_product(struct page *dest, struct page **srcs, unsigned char *coef,
+ size_t len, struct async_submit_ctl *submit)
+{
+ struct dma_chan *chan = async_tx_find_channel(submit, DMA_PQ,
+ &dest, 1, srcs, 2, len);
+ struct dma_device *dma = chan ? chan->device : NULL;
+ struct dmaengine_unmap_data *unmap = NULL;
+ const u8 *amul, *bmul;
+ u8 ax, bx;
+ u8 *a, *b, *c;
+
+ if (dma)
+ unmap = dmaengine_get_unmap_data(dma->dev, 3, GFP_NOIO);
+
+ if (unmap) {
+ struct device *dev = dma->dev;
+ dma_addr_t pq[2];
+ struct dma_async_tx_descriptor *tx;
+ enum dma_ctrl_flags dma_flags = DMA_PREP_PQ_DISABLE_P;
+
+ if (submit->flags & ASYNC_TX_FENCE)
+ dma_flags |= DMA_PREP_FENCE;
+ unmap->addr[0] = dma_map_page(dev, srcs[0], 0, len, DMA_TO_DEVICE);
+ unmap->addr[1] = dma_map_page(dev, srcs[1], 0, len, DMA_TO_DEVICE);
+ unmap->to_cnt = 2;
+
+ unmap->addr[2] = dma_map_page(dev, dest, 0, len, DMA_BIDIRECTIONAL);
+ unmap->bidi_cnt = 1;
+ /* engine only looks at Q, but expects it to follow P */
+ pq[1] = unmap->addr[2];
+
+ unmap->len = len;
+ tx = dma->device_prep_dma_pq(chan, pq, unmap->addr, 2, coef,
+ len, dma_flags);
+ if (tx) {
+ dma_set_unmap(tx, unmap);
+ async_tx_submit(chan, tx, submit);
+ dmaengine_unmap_put(unmap);
+ return tx;
+ }
+
+ /* could not get a descriptor, unmap and fall through to
+ * the synchronous path
+ */
+ dmaengine_unmap_put(unmap);
+ }
+
+ /* run the operation synchronously */
+ async_tx_quiesce(&submit->depend_tx);
+ amul = raid6_gfmul[coef[0]];
+ bmul = raid6_gfmul[coef[1]];
+ a = page_address(srcs[0]);
+ b = page_address(srcs[1]);
+ c = page_address(dest);
+
+ while (len--) {
+ ax = amul[*a++];
+ bx = bmul[*b++];
+ *c++ = ax ^ bx;
+ }
+
+ return NULL;
+}
+
+static struct dma_async_tx_descriptor *
+async_mult(struct page *dest, struct page *src, u8 coef, size_t len,
+ struct async_submit_ctl *submit)
+{
+ struct dma_chan *chan = async_tx_find_channel(submit, DMA_PQ,
+ &dest, 1, &src, 1, len);
+ struct dma_device *dma = chan ? chan->device : NULL;
+ struct dmaengine_unmap_data *unmap = NULL;
+ const u8 *qmul; /* Q multiplier table */
+ u8 *d, *s;
+
+ if (dma)
+ unmap = dmaengine_get_unmap_data(dma->dev, 3, GFP_NOIO);
+
+ if (unmap) {
+ dma_addr_t dma_dest[2];
+ struct device *dev = dma->dev;
+ struct dma_async_tx_descriptor *tx;
+ enum dma_ctrl_flags dma_flags = DMA_PREP_PQ_DISABLE_P;
+
+ if (submit->flags & ASYNC_TX_FENCE)
+ dma_flags |= DMA_PREP_FENCE;
+ unmap->addr[0] = dma_map_page(dev, src, 0, len, DMA_TO_DEVICE);
+ unmap->to_cnt++;
+ unmap->addr[1] = dma_map_page(dev, dest, 0, len, DMA_BIDIRECTIONAL);
+ dma_dest[1] = unmap->addr[1];
+ unmap->bidi_cnt++;
+ unmap->len = len;
+
+ /* this looks funny, but the engine looks for Q at
+ * dma_dest[1] and ignores dma_dest[0] as a dest
+ * due to DMA_PREP_PQ_DISABLE_P
+ */
+ tx = dma->device_prep_dma_pq(chan, dma_dest, unmap->addr,
+ 1, &coef, len, dma_flags);
+
+ if (tx) {
+ dma_set_unmap(tx, unmap);
+ dmaengine_unmap_put(unmap);
+ async_tx_submit(chan, tx, submit);
+ return tx;
+ }
+
+ /* could not get a descriptor, unmap and fall through to
+ * the synchronous path
+ */
+ dmaengine_unmap_put(unmap);
+ }
+
+ /* no channel available, or failed to allocate a descriptor, so
+ * perform the operation synchronously
+ */
+ async_tx_quiesce(&submit->depend_tx);
+ qmul = raid6_gfmul[coef];
+ d = page_address(dest);
+ s = page_address(src);
+
+ while (len--)
+ *d++ = qmul[*s++];
+
+ return NULL;
+}
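
The qmul/amul/bmul lookups above are precomputed GF(2^8) multiplications over the polynomial 0x11d. A standalone sketch of the underlying multiply, with one spot check against the field's generator {02}:

#include <stdio.h>
#include <stdint.h>

/* Multiply in GF(2^8) with reduction polynomial 0x11d; this is what the
 * raid6_gfmul[] tables precompute one row per constant.
 */
static uint8_t gf256_mul(uint8_t a, uint8_t b)
{
	uint8_t r = 0;

	while (b) {
		if (b & 1)
			r ^= a;
		b >>= 1;
		a = (a << 1) ^ ((a & 0x80) ? 0x1d : 0);
	}
	return r;
}

int main(void)
{
	/* {02} * {80} wraps around the polynomial and yields 0x1d */
	printf("{02}*{80} = %02x\n", gf256_mul(0x02, 0x80));
	return 0;
}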
+
+static struct dma_async_tx_descriptor *
+__2data_recov_4(int disks, size_t bytes, int faila, int failb,
+ struct page **blocks, struct async_submit_ctl *submit)
+{
+ struct dma_async_tx_descriptor *tx = NULL;
+ struct page *p, *q, *a, *b;
+ struct page *srcs[2];
+ unsigned char coef[2];
+ enum async_tx_flags flags = submit->flags;
+ dma_async_tx_callback cb_fn = submit->cb_fn;
+ void *cb_param = submit->cb_param;
+ void *scribble = submit->scribble;
+
+ p = blocks[disks-2];
+ q = blocks[disks-1];
+
+ a = blocks[faila];
+ b = blocks[failb];
+
+ /* in the 4 disk case P + Pxy == P and Q + Qxy == Q */
+ /* Dx = A*(P+Pxy) + B*(Q+Qxy) */
+ srcs[0] = p;
+ srcs[1] = q;
+ coef[0] = raid6_gfexi[failb-faila];
+ coef[1] = raid6_gfinv[raid6_gfexp[faila]^raid6_gfexp[failb]];
+ init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble);
+ tx = async_sum_product(b, srcs, coef, bytes, submit);
+
+ /* Dy = P+Pxy+Dx */
+ srcs[0] = p;
+ srcs[1] = b;
+ init_async_submit(submit, flags | ASYNC_TX_XOR_ZERO_DST, tx, cb_fn,
+ cb_param, scribble);
+ tx = async_xor(a, srcs, 0, 2, bytes, submit);
+
+ return tx;
+
+}
+
+static struct dma_async_tx_descriptor *
+__2data_recov_5(int disks, size_t bytes, int faila, int failb,
+ struct page **blocks, struct async_submit_ctl *submit)
+{
+ struct dma_async_tx_descriptor *tx = NULL;
+ struct page *p, *q, *g, *dp, *dq;
+ struct page *srcs[2];
+ unsigned char coef[2];
+ enum async_tx_flags flags = submit->flags;
+ dma_async_tx_callback cb_fn = submit->cb_fn;
+ void *cb_param = submit->cb_param;
+ void *scribble = submit->scribble;
+ int good_srcs, good, i;
+
+ good_srcs = 0;
+ good = -1;
+ for (i = 0; i < disks-2; i++) {
+ if (blocks[i] == NULL)
+ continue;
+ if (i == faila || i == failb)
+ continue;
+ good = i;
+ good_srcs++;
+ }
+ BUG_ON(good_srcs > 1);
+
+ p = blocks[disks-2];
+ q = blocks[disks-1];
+ g = blocks[good];
+
+ /* Compute syndrome with zero for the missing data pages
+ * Use the dead data pages as temporary storage for delta p and
+ * delta q
+ */
+ dp = blocks[faila];
+ dq = blocks[failb];
+
+ init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble);
+ tx = async_memcpy(dp, g, 0, 0, bytes, submit);
+ init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble);
+ tx = async_mult(dq, g, raid6_gfexp[good], bytes, submit);
+
+ /* compute P + Pxy */
+ srcs[0] = dp;
+ srcs[1] = p;
+ init_async_submit(submit, ASYNC_TX_FENCE|ASYNC_TX_XOR_DROP_DST, tx,
+ NULL, NULL, scribble);
+ tx = async_xor(dp, srcs, 0, 2, bytes, submit);
+
+ /* compute Q + Qxy */
+ srcs[0] = dq;
+ srcs[1] = q;
+ init_async_submit(submit, ASYNC_TX_FENCE|ASYNC_TX_XOR_DROP_DST, tx,
+ NULL, NULL, scribble);
+ tx = async_xor(dq, srcs, 0, 2, bytes, submit);
+
+ /* Dx = A*(P+Pxy) + B*(Q+Qxy) */
+ srcs[0] = dp;
+ srcs[1] = dq;
+ coef[0] = raid6_gfexi[failb-faila];
+ coef[1] = raid6_gfinv[raid6_gfexp[faila]^raid6_gfexp[failb]];
+ init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble);
+ tx = async_sum_product(dq, srcs, coef, bytes, submit);
+
+ /* Dy = P+Pxy+Dx */
+ srcs[0] = dp;
+ srcs[1] = dq;
+ init_async_submit(submit, flags | ASYNC_TX_XOR_DROP_DST, tx, cb_fn,
+ cb_param, scribble);
+ tx = async_xor(dp, srcs, 0, 2, bytes, submit);
+
+ return tx;
+}
+
+static struct dma_async_tx_descriptor *
+__2data_recov_n(int disks, size_t bytes, int faila, int failb,
+ struct page **blocks, struct async_submit_ctl *submit)
+{
+ struct dma_async_tx_descriptor *tx = NULL;
+ struct page *p, *q, *dp, *dq;
+ struct page *srcs[2];
+ unsigned char coef[2];
+ enum async_tx_flags flags = submit->flags;
+ dma_async_tx_callback cb_fn = submit->cb_fn;
+ void *cb_param = submit->cb_param;
+ void *scribble = submit->scribble;
+
+ p = blocks[disks-2];
+ q = blocks[disks-1];
+
+ /* Compute syndrome with zero for the missing data pages
+ * Use the dead data pages as temporary storage for
+ * delta p and delta q
+ */
+ dp = blocks[faila];
+ blocks[faila] = NULL;
+ blocks[disks-2] = dp;
+ dq = blocks[failb];
+ blocks[failb] = NULL;
+ blocks[disks-1] = dq;
+
+ init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble);
+ tx = async_gen_syndrome(blocks, 0, disks, bytes, submit);
+
+ /* Restore pointer table */
+ blocks[faila] = dp;
+ blocks[failb] = dq;
+ blocks[disks-2] = p;
+ blocks[disks-1] = q;
+
+ /* compute P + Pxy */
+ srcs[0] = dp;
+ srcs[1] = p;
+ init_async_submit(submit, ASYNC_TX_FENCE|ASYNC_TX_XOR_DROP_DST, tx,
+ NULL, NULL, scribble);
+ tx = async_xor(dp, srcs, 0, 2, bytes, submit);
+
+ /* compute Q + Qxy */
+ srcs[0] = dq;
+ srcs[1] = q;
+ init_async_submit(submit, ASYNC_TX_FENCE|ASYNC_TX_XOR_DROP_DST, tx,
+ NULL, NULL, scribble);
+ tx = async_xor(dq, srcs, 0, 2, bytes, submit);
+
+ /* Dx = A*(P+Pxy) + B*(Q+Qxy) */
+ srcs[0] = dp;
+ srcs[1] = dq;
+ coef[0] = raid6_gfexi[failb-faila];
+ coef[1] = raid6_gfinv[raid6_gfexp[faila]^raid6_gfexp[failb]];
+ init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble);
+ tx = async_sum_product(dq, srcs, coef, bytes, submit);
+
+ /* Dy = P+Pxy+Dx */
+ srcs[0] = dp;
+ srcs[1] = dq;
+ init_async_submit(submit, flags | ASYNC_TX_XOR_DROP_DST, tx, cb_fn,
+ cb_param, scribble);
+ tx = async_xor(dp, srcs, 0, 2, bytes, submit);
+
+ return tx;
+}
+
+/**
+ * async_raid6_2data_recov - asynchronously calculate two missing data blocks
+ * @disks: number of disks in the RAID-6 array
+ * @bytes: block size
+ * @faila: first failed drive index
+ * @failb: second failed drive index
+ * @blocks: array of source pointers where the last two entries are p and q
+ * @submit: submission/completion modifiers
+ */
+struct dma_async_tx_descriptor *
+async_raid6_2data_recov(int disks, size_t bytes, int faila, int failb,
+ struct page **blocks, struct async_submit_ctl *submit)
+{
+ void *scribble = submit->scribble;
+ int non_zero_srcs, i;
+
+ BUG_ON(faila == failb);
+ if (failb < faila)
+ swap(faila, failb);
+
+ pr_debug("%s: disks: %d len: %zu\n", __func__, disks, bytes);
+
+ /* if a dma resource is not available or a scribble buffer is not
+ * available punt to the synchronous path. In the 'dma not
+ * available' case be sure to use the scribble buffer to
+ * preserve the content of 'blocks' as the caller intended.
+ */
+ if (!async_dma_find_channel(DMA_PQ) || !scribble) {
+ void **ptrs = scribble ? scribble : (void **) blocks;
+
+ async_tx_quiesce(&submit->depend_tx);
+ for (i = 0; i < disks; i++)
+ if (blocks[i] == NULL)
+ ptrs[i] = (void *) raid6_empty_zero_page;
+ else
+ ptrs[i] = page_address(blocks[i]);
+
+ raid6_2data_recov(disks, bytes, faila, failb, ptrs);
+
+ async_tx_sync_epilog(submit);
+
+ return NULL;
+ }
+
+ non_zero_srcs = 0;
+ for (i = 0; i < disks-2 && non_zero_srcs < 4; i++)
+ if (blocks[i])
+ non_zero_srcs++;
+ switch (non_zero_srcs) {
+ case 0:
+ case 1:
+ /* There must be at least 2 sources - the failed devices. */
+ BUG();
+
+ case 2:
+ /* dma devices do not uniformly understand a zero source pq
+ * operation (in contrast to the synchronous case), so
+ * explicitly handle the special case of a 4 disk array with
+ * both data disks missing.
+ */
+ return __2data_recov_4(disks, bytes, faila, failb, blocks, submit);
+ case 3:
+ /* dma devices do not uniformly understand a single
+ * source pq operation (in contrast to the synchronous
+ * case), so explicitly handle the special case of a 5 disk
+ * array with 2 of 3 data disks missing.
+ */
+ return __2data_recov_5(disks, bytes, faila, failb, blocks, submit);
+ default:
+ return __2data_recov_n(disks, bytes, faila, failb, blocks, submit);
+ }
+}
+EXPORT_SYMBOL_GPL(async_raid6_2data_recov);
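
For reference, a minimal caller sketch (not part of the patch), modelled on the raid6test code added later in this series: recover two failed data blocks and block until the chain completes. The helper name recover_two() and the wait-for-completion pattern are illustrative assumptions; the scribble region keeps 'blocks' intact if the synchronous fallback is taken.

#include <linux/async_tx.h>
#include <linux/completion.h>

static void recov_done(void *param)
{
	complete(param);
}

static void recover_two(struct page **blocks, int disks, size_t bytes,
			int faila, int failb, addr_conv_t *addr_conv)
{
	struct async_submit_ctl submit;
	struct dma_async_tx_descriptor *tx;
	struct completion cmp;

	init_completion(&cmp);
	init_async_submit(&submit, ASYNC_TX_ACK, NULL, recov_done, &cmp,
			  addr_conv);
	tx = async_raid6_2data_recov(disks, bytes, faila, failb, blocks,
				     &submit);
	async_tx_issue_pending(tx);
	wait_for_completion(&cmp);
}
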
+
+/**
+ * async_raid6_datap_recov - asynchronously calculate a data and the 'p' block
+ * @disks: number of disks in the RAID-6 array
+ * @bytes: block size
+ * @faila: failed drive index
+ * @blocks: array of source pointers where the last two entries are p and q
+ * @submit: submission/completion modifiers
+ */
+struct dma_async_tx_descriptor *
+async_raid6_datap_recov(int disks, size_t bytes, int faila,
+ struct page **blocks, struct async_submit_ctl *submit)
+{
+ struct dma_async_tx_descriptor *tx = NULL;
+ struct page *p, *q, *dq;
+ u8 coef;
+ enum async_tx_flags flags = submit->flags;
+ dma_async_tx_callback cb_fn = submit->cb_fn;
+ void *cb_param = submit->cb_param;
+ void *scribble = submit->scribble;
+ int good_srcs, good, i;
+ struct page *srcs[2];
+
+ pr_debug("%s: disks: %d len: %zu\n", __func__, disks, bytes);
+
+ /* if a dma resource is not available or a scribble buffer is not
+ * available punt to the synchronous path. In the 'dma not
+ * available' case be sure to use the scribble buffer to
+ * preserve the content of 'blocks' as the caller intended.
+ */
+ if (!async_dma_find_channel(DMA_PQ) || !scribble) {
+ void **ptrs = scribble ? scribble : (void **) blocks;
+
+ async_tx_quiesce(&submit->depend_tx);
+ for (i = 0; i < disks; i++)
+ if (blocks[i] == NULL)
+ ptrs[i] = (void*)raid6_empty_zero_page;
+ else
+ ptrs[i] = page_address(blocks[i]);
+
+ raid6_datap_recov(disks, bytes, faila, ptrs);
+
+ async_tx_sync_epilog(submit);
+
+ return NULL;
+ }
+
+ good_srcs = 0;
+ good = -1;
+ for (i = 0; i < disks-2; i++) {
+ if (i == faila)
+ continue;
+ if (blocks[i]) {
+ good = i;
+ good_srcs++;
+ if (good_srcs > 1)
+ break;
+ }
+ }
+ BUG_ON(good_srcs == 0);
+
+ p = blocks[disks-2];
+ q = blocks[disks-1];
+
+ /* Compute syndrome with zero for the missing data page
+ * Use the dead data page as temporary storage for delta q
+ */
+ dq = blocks[faila];
+ blocks[faila] = NULL;
+ blocks[disks-1] = dq;
+
+ /* in the 4-disk case we only need to perform a single source
+ * multiplication with the one good data block.
+ */
+ if (good_srcs == 1) {
+ struct page *g = blocks[good];
+
+ init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL,
+ scribble);
+ tx = async_memcpy(p, g, 0, 0, bytes, submit);
+
+ init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL,
+ scribble);
+ tx = async_mult(dq, g, raid6_gfexp[good], bytes, submit);
+ } else {
+ init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL,
+ scribble);
+ tx = async_gen_syndrome(blocks, 0, disks, bytes, submit);
+ }
+
+ /* Restore pointer table */
+ blocks[faila] = dq;
+ blocks[disks-1] = q;
+
+ /* calculate g^{-faila} */
+ coef = raid6_gfinv[raid6_gfexp[faila]];
+
+ srcs[0] = dq;
+ srcs[1] = q;
+ init_async_submit(submit, ASYNC_TX_FENCE|ASYNC_TX_XOR_DROP_DST, tx,
+ NULL, NULL, scribble);
+ tx = async_xor(dq, srcs, 0, 2, bytes, submit);
+
+ init_async_submit(submit, ASYNC_TX_FENCE, tx, NULL, NULL, scribble);
+ tx = async_mult(dq, dq, coef, bytes, submit);
+
+ srcs[0] = p;
+ srcs[1] = dq;
+ init_async_submit(submit, flags | ASYNC_TX_XOR_DROP_DST, tx, cb_fn,
+ cb_param, scribble);
+ tx = async_xor(p, srcs, 0, 2, bytes, submit);
+
+ return tx;
+}
+EXPORT_SYMBOL_GPL(async_raid6_datap_recov);
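
Per byte, the chain above computes Dx = g^{-faila} * (Q + Qxy) and then rebuilds P as Pxy + Dx (after the syndrome pass, p holds Pxy and dq holds Qxy). A sketch of the synchronous equivalent for comparison (see raid6_datap_recov() in lib/raid6; the helper name here is illustrative):

static void datap_recov_bytewise(u8 *p, u8 *q, u8 *dq, size_t bytes, int faila)
{
	/* g^{-faila}, the same coefficient computed above */
	const u8 *qmul = raid6_gfmul[raid6_gfinv[raid6_gfexp[faila]]];

	while (bytes--) {
		*dq = qmul[*q ^ *dq];	/* Dx = g^{-faila} * (Q + Qxy) */
		*p ^= *dq;		/* P  = Pxy + Dx */
		p++; q++; dq++;
	}
}
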
+
+MODULE_AUTHOR("Dan Williams <dan.j.williams@intel.com>");
+MODULE_DESCRIPTION("asynchronous RAID-6 recovery api");
+MODULE_LICENSE("GPL");
diff --git a/crypto/async_tx/async_tx.c b/crypto/async_tx/async_tx.c
index f21147f3626..39ea4791a3c 100644
--- a/crypto/async_tx/async_tx.c
+++ b/crypto/async_tx/async_tx.c
@@ -24,13 +24,14 @@
*
*/
#include <linux/rculist.h>
+#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/async_tx.h>
#ifdef CONFIG_DMA_ENGINE
static int __init async_tx_init(void)
{
- dmaengine_get();
+ async_dmaengine_get();
printk(KERN_INFO "async_tx: api initialized (async)\n");
@@ -39,37 +40,31 @@ static int __init async_tx_init(void)
static void __exit async_tx_exit(void)
{
- dmaengine_put();
+ async_dmaengine_put();
}
+module_init(async_tx_init);
+module_exit(async_tx_exit);
+
/**
* __async_tx_find_channel - find a channel to carry out the operation or let
* the transaction execute synchronously
- * @depend_tx: transaction dependency
+ * @submit: transaction dependency and submission modifiers
* @tx_type: transaction type
*/
struct dma_chan *
-__async_tx_find_channel(struct dma_async_tx_descriptor *depend_tx,
- enum dma_transaction_type tx_type)
+__async_tx_find_channel(struct async_submit_ctl *submit,
+ enum dma_transaction_type tx_type)
{
+ struct dma_async_tx_descriptor *depend_tx = submit->depend_tx;
+
/* see if we can keep the chain on one channel */
if (depend_tx &&
dma_has_cap(tx_type, depend_tx->chan->device->cap_mask))
return depend_tx->chan;
- return dma_find_channel(tx_type);
+ return async_dma_find_channel(tx_type);
}
EXPORT_SYMBOL_GPL(__async_tx_find_channel);
-#else
-static int __init async_tx_init(void)
-{
- printk(KERN_INFO "async_tx: api initialized (sync-only)\n");
- return 0;
-}
-
-static void __exit async_tx_exit(void)
-{
- do { } while (0);
-}
#endif
@@ -83,24 +78,23 @@ static void
async_tx_channel_switch(struct dma_async_tx_descriptor *depend_tx,
struct dma_async_tx_descriptor *tx)
{
- struct dma_chan *chan;
- struct dma_device *device;
+ struct dma_chan *chan = depend_tx->chan;
+ struct dma_device *device = chan->device;
struct dma_async_tx_descriptor *intr_tx = (void *) ~0;
/* first check to see if we can still append to depend_tx */
- spin_lock_bh(&depend_tx->lock);
- if (depend_tx->parent && depend_tx->chan == tx->chan) {
- tx->parent = depend_tx;
- depend_tx->next = tx;
+ txd_lock(depend_tx);
+ if (txd_parent(depend_tx) && depend_tx->chan == tx->chan) {
+ txd_chain(depend_tx, tx);
intr_tx = NULL;
}
- spin_unlock_bh(&depend_tx->lock);
+ txd_unlock(depend_tx);
- if (!intr_tx)
+ /* attached dependency, flush the parent channel */
+ if (!intr_tx) {
+ device->device_issue_pending(chan);
return;
-
- chan = depend_tx->chan;
- device = chan->device;
+ }
/* see if we can schedule an interrupt
* otherwise poll for completion
@@ -113,30 +107,29 @@ async_tx_channel_switch(struct dma_async_tx_descriptor *depend_tx,
if (intr_tx) {
intr_tx->callback = NULL;
intr_tx->callback_param = NULL;
- tx->parent = intr_tx;
- /* safe to set ->next outside the lock since we know we are
+ /* safe to chain outside the lock since we know we are
* not submitted yet
*/
- intr_tx->next = tx;
+ txd_chain(intr_tx, tx);
/* check if we need to append */
- spin_lock_bh(&depend_tx->lock);
- if (depend_tx->parent) {
- intr_tx->parent = depend_tx;
- depend_tx->next = intr_tx;
+ txd_lock(depend_tx);
+ if (txd_parent(depend_tx)) {
+ txd_chain(depend_tx, intr_tx);
async_tx_ack(intr_tx);
intr_tx = NULL;
}
- spin_unlock_bh(&depend_tx->lock);
+ txd_unlock(depend_tx);
if (intr_tx) {
- intr_tx->parent = NULL;
+ txd_clear_parent(intr_tx);
intr_tx->tx_submit(intr_tx);
async_tx_ack(intr_tx);
}
+ device->device_issue_pending(chan);
} else {
- if (dma_wait_for_async_tx(depend_tx) == DMA_ERROR)
- panic("%s: DMA_ERROR waiting for depend_tx\n",
+ if (dma_wait_for_async_tx(depend_tx) != DMA_COMPLETE)
+ panic("%s: DMA error waiting for depend_tx\n",
__func__);
tx->tx_submit(tx);
}
@@ -144,13 +137,14 @@ async_tx_channel_switch(struct dma_async_tx_descriptor *depend_tx,
/**
- * submit_disposition - while holding depend_tx->lock we must avoid submitting
- * new operations to prevent a circular locking dependency with
- * drivers that already hold a channel lock when calling
- * async_tx_run_dependencies.
+ * submit_disposition - flags for routing an incoming operation
* @ASYNC_TX_SUBMITTED: we were able to append the new operation under the lock
* @ASYNC_TX_CHANNEL_SWITCH: when the lock is dropped schedule a channel switch
* @ASYNC_TX_DIRECT_SUBMIT: when the lock is dropped submit directly
+ *
+ * while holding depend_tx->lock we must avoid submitting new operations
+ * to prevent a circular locking dependency with drivers that already
+ * hold a channel lock when calling async_tx_run_dependencies.
*/
enum submit_disposition {
ASYNC_TX_SUBMITTED,
@@ -160,11 +154,12 @@ enum submit_disposition {
void
async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx,
- enum async_tx_flags flags, struct dma_async_tx_descriptor *depend_tx,
- dma_async_tx_callback cb_fn, void *cb_param)
+ struct async_submit_ctl *submit)
{
- tx->callback = cb_fn;
- tx->callback_param = cb_param;
+ struct dma_async_tx_descriptor *depend_tx = submit->depend_tx;
+
+ tx->callback = submit->cb_fn;
+ tx->callback_param = submit->cb_param;
if (depend_tx) {
enum submit_disposition s;
@@ -175,21 +170,20 @@ async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx,
* 2/ dependencies are 1:1 i.e. two transactions can
* not depend on the same parent
*/
- BUG_ON(async_tx_test_ack(depend_tx) || depend_tx->next ||
- tx->parent);
+ BUG_ON(async_tx_test_ack(depend_tx) || txd_next(depend_tx) ||
+ txd_parent(tx));
/* the lock prevents async_tx_run_dependencies from missing
* the setting of ->next when ->parent != NULL
*/
- spin_lock_bh(&depend_tx->lock);
- if (depend_tx->parent) {
+ txd_lock(depend_tx);
+ if (txd_parent(depend_tx)) {
/* we have a parent so we can not submit directly
* if we are staying on the same channel: append
* else: channel switch
*/
if (depend_tx->chan == chan) {
- tx->parent = depend_tx;
- depend_tx->next = tx;
+ txd_chain(depend_tx, tx);
s = ASYNC_TX_SUBMITTED;
} else
s = ASYNC_TX_CHANNEL_SWITCH;
@@ -202,7 +196,7 @@ async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx,
else
s = ASYNC_TX_CHANNEL_SWITCH;
}
- spin_unlock_bh(&depend_tx->lock);
+ txd_unlock(depend_tx);
switch (s) {
case ASYNC_TX_SUBMITTED:
@@ -211,39 +205,38 @@ async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx,
async_tx_channel_switch(depend_tx, tx);
break;
case ASYNC_TX_DIRECT_SUBMIT:
- tx->parent = NULL;
+ txd_clear_parent(tx);
tx->tx_submit(tx);
break;
}
} else {
- tx->parent = NULL;
+ txd_clear_parent(tx);
tx->tx_submit(tx);
}
- if (flags & ASYNC_TX_ACK)
+ if (submit->flags & ASYNC_TX_ACK)
async_tx_ack(tx);
- if (depend_tx && (flags & ASYNC_TX_DEP_ACK))
+ if (depend_tx)
async_tx_ack(depend_tx);
}
EXPORT_SYMBOL_GPL(async_tx_submit);
/**
- * async_trigger_callback - schedules the callback function to be run after
- * any dependent operations have been completed.
- * @flags: ASYNC_TX_ACK, ASYNC_TX_DEP_ACK
- * @depend_tx: 'callback' requires the completion of this transaction
- * @cb_fn: function to call after depend_tx completes
- * @cb_param: parameter to pass to the callback routine
+ * async_trigger_callback - schedules the callback function to be run
+ * @submit: submission and completion parameters
+ *
+ * honored flags: ASYNC_TX_ACK
+ *
+ * The callback is run after any dependent operations have completed.
*/
struct dma_async_tx_descriptor *
-async_trigger_callback(enum async_tx_flags flags,
- struct dma_async_tx_descriptor *depend_tx,
- dma_async_tx_callback cb_fn, void *cb_param)
+async_trigger_callback(struct async_submit_ctl *submit)
{
struct dma_chan *chan;
struct dma_device *device;
struct dma_async_tx_descriptor *tx;
+ struct dma_async_tx_descriptor *depend_tx = submit->depend_tx;
if (depend_tx) {
chan = depend_tx->chan;
@@ -262,14 +255,14 @@ async_trigger_callback(enum async_tx_flags flags,
if (tx) {
pr_debug("%s: (async)\n", __func__);
- async_tx_submit(chan, tx, flags, depend_tx, cb_fn, cb_param);
+ async_tx_submit(chan, tx, submit);
} else {
pr_debug("%s: (sync)\n", __func__);
/* wait for any prerequisite operations */
- async_tx_quiesce(&depend_tx);
+ async_tx_quiesce(&submit->depend_tx);
- async_tx_sync_epilog(cb_fn, cb_param);
+ async_tx_sync_epilog(submit);
}
return tx;
@@ -287,17 +280,15 @@ void async_tx_quiesce(struct dma_async_tx_descriptor **tx)
* we are referring to the correct operation
*/
BUG_ON(async_tx_test_ack(*tx));
- if (dma_wait_for_async_tx(*tx) == DMA_ERROR)
- panic("DMA_ERROR waiting for transaction\n");
+ if (dma_wait_for_async_tx(*tx) != DMA_COMPLETE)
+ panic("%s: DMA error waiting for transaction\n",
+ __func__);
async_tx_ack(*tx);
*tx = NULL;
}
}
EXPORT_SYMBOL_GPL(async_tx_quiesce);
-module_init(async_tx_init);
-module_exit(async_tx_exit);
-
MODULE_AUTHOR("Intel Corporation");
MODULE_DESCRIPTION("Asynchronous Bulk Memory Transactions API");
MODULE_LICENSE("GPL");
diff --git a/crypto/async_tx/async_xor.c b/crypto/async_tx/async_xor.c
index 595b78672b3..3c562f5a60b 100644
--- a/crypto/async_tx/async_xor.c
+++ b/crypto/async_tx/async_xor.c
@@ -25,99 +25,84 @@
*/
#include <linux/kernel.h>
#include <linux/interrupt.h>
+#include <linux/module.h>
#include <linux/mm.h>
#include <linux/dma-mapping.h>
#include <linux/raid/xor.h>
#include <linux/async_tx.h>
-/* do_async_xor - dma map the pages and perform the xor with an engine.
- * This routine is marked __always_inline so it can be compiled away
- * when CONFIG_DMA_ENGINE=n
- */
-static __always_inline struct dma_async_tx_descriptor *
-do_async_xor(struct dma_chan *chan, struct page *dest, struct page **src_list,
- unsigned int offset, int src_cnt, size_t len,
- enum async_tx_flags flags,
- struct dma_async_tx_descriptor *depend_tx,
- dma_async_tx_callback cb_fn, void *cb_param)
+/* do_async_xor - dma map the pages and perform the xor with an engine */
+static __async_inline struct dma_async_tx_descriptor *
+do_async_xor(struct dma_chan *chan, struct dmaengine_unmap_data *unmap,
+ struct async_submit_ctl *submit)
{
struct dma_device *dma = chan->device;
- dma_addr_t *dma_src = (dma_addr_t *) src_list;
struct dma_async_tx_descriptor *tx = NULL;
- int src_off = 0;
- int i;
- dma_async_tx_callback _cb_fn;
- void *_cb_param;
- enum async_tx_flags async_flags;
- enum dma_ctrl_flags dma_flags;
+ dma_async_tx_callback cb_fn_orig = submit->cb_fn;
+ void *cb_param_orig = submit->cb_param;
+ enum async_tx_flags flags_orig = submit->flags;
+ enum dma_ctrl_flags dma_flags = 0;
+ int src_cnt = unmap->to_cnt;
int xor_src_cnt;
- dma_addr_t dma_dest;
-
- /* map the dest bidrectional in case it is re-used as a source */
- dma_dest = dma_map_page(dma->dev, dest, offset, len, DMA_BIDIRECTIONAL);
- for (i = 0; i < src_cnt; i++) {
- /* only map the dest once */
- if (unlikely(src_list[i] == dest)) {
- dma_src[i] = dma_dest;
- continue;
- }
- dma_src[i] = dma_map_page(dma->dev, src_list[i], offset,
- len, DMA_TO_DEVICE);
- }
+ dma_addr_t dma_dest = unmap->addr[unmap->to_cnt];
+ dma_addr_t *src_list = unmap->addr;
while (src_cnt) {
- async_flags = flags;
- dma_flags = 0;
- xor_src_cnt = min(src_cnt, dma->max_xor);
- /* if we are submitting additional xors, leave the chain open,
- * clear the callback parameters, and leave the destination
- * buffer mapped
+ dma_addr_t tmp;
+
+ submit->flags = flags_orig;
+ xor_src_cnt = min(src_cnt, (int)dma->max_xor);
+ /* if we are submitting additional xors, leave the chain open
+ * and clear the callback parameters
*/
if (src_cnt > xor_src_cnt) {
- async_flags &= ~ASYNC_TX_ACK;
- dma_flags = DMA_COMPL_SKIP_DEST_UNMAP;
- _cb_fn = NULL;
- _cb_param = NULL;
+ submit->flags &= ~ASYNC_TX_ACK;
+ submit->flags |= ASYNC_TX_FENCE;
+ submit->cb_fn = NULL;
+ submit->cb_param = NULL;
} else {
- _cb_fn = cb_fn;
- _cb_param = cb_param;
+ submit->cb_fn = cb_fn_orig;
+ submit->cb_param = cb_param_orig;
}
- if (_cb_fn)
+ if (submit->cb_fn)
dma_flags |= DMA_PREP_INTERRUPT;
+ if (submit->flags & ASYNC_TX_FENCE)
+ dma_flags |= DMA_PREP_FENCE;
- /* Since we have clobbered the src_list we are committed
- * to doing this asynchronously. Drivers force forward progress
- * in case they can not provide a descriptor
+ /* Drivers force forward progress in case they can not provide a
+ * descriptor
*/
- tx = dma->device_prep_dma_xor(chan, dma_dest, &dma_src[src_off],
- xor_src_cnt, len, dma_flags);
+ tmp = src_list[0];
+ if (src_list > unmap->addr)
+ src_list[0] = dma_dest;
+ tx = dma->device_prep_dma_xor(chan, dma_dest, src_list,
+ xor_src_cnt, unmap->len,
+ dma_flags);
+ src_list[0] = tmp;
+
if (unlikely(!tx))
- async_tx_quiesce(&depend_tx);
+ async_tx_quiesce(&submit->depend_tx);
- /* spin wait for the preceeding transactions to complete */
+ /* spin wait for the preceding transactions to complete */
while (unlikely(!tx)) {
dma_async_issue_pending(chan);
tx = dma->device_prep_dma_xor(chan, dma_dest,
- &dma_src[src_off],
- xor_src_cnt, len,
+ src_list,
+ xor_src_cnt, unmap->len,
dma_flags);
}
- async_tx_submit(chan, tx, async_flags, depend_tx, _cb_fn,
- _cb_param);
-
- depend_tx = tx;
- flags |= ASYNC_TX_DEP_ACK;
+ dma_set_unmap(tx, unmap);
+ async_tx_submit(chan, tx, submit);
+ submit->depend_tx = tx;
if (src_cnt > xor_src_cnt) {
/* drop completed sources */
src_cnt -= xor_src_cnt;
- src_off += xor_src_cnt;
-
			/* use the intermediate result as a source */

- dma_src[--src_off] = dma_dest;
src_cnt++;
+ src_list += xor_src_cnt - 1;
} else
break;
}
@@ -127,23 +112,28 @@ do_async_xor(struct dma_chan *chan, struct page *dest, struct page **src_list,
static void
do_sync_xor(struct page *dest, struct page **src_list, unsigned int offset,
- int src_cnt, size_t len, enum async_tx_flags flags,
- dma_async_tx_callback cb_fn, void *cb_param)
+ int src_cnt, size_t len, struct async_submit_ctl *submit)
{
int i;
- int xor_src_cnt;
+ int xor_src_cnt = 0;
int src_off = 0;
void *dest_buf;
- void **srcs = (void **) src_list;
+ void **srcs;
- /* reuse the 'src_list' array to convert to buffer pointers */
- for (i = 0; i < src_cnt; i++)
- srcs[i] = page_address(src_list[i]) + offset;
+ if (submit->scribble)
+ srcs = submit->scribble;
+ else
+ srcs = (void **) src_list;
+ /* convert to buffer pointers */
+ for (i = 0; i < src_cnt; i++)
+ if (src_list[i])
+ srcs[xor_src_cnt++] = page_address(src_list[i]) + offset;
+ src_cnt = xor_src_cnt;
/* set destination address */
dest_buf = page_address(dest) + offset;
- if (flags & ASYNC_TX_XOR_ZERO_DST)
+ if (submit->flags & ASYNC_TX_XOR_ZERO_DST)
memset(dest_buf, 0, len);
while (src_cnt > 0) {
@@ -156,61 +146,88 @@ do_sync_xor(struct page *dest, struct page **src_list, unsigned int offset,
src_off += xor_src_cnt;
}
- async_tx_sync_epilog(cb_fn, cb_param);
+ async_tx_sync_epilog(submit);
}
/**
* async_xor - attempt to xor a set of blocks with a dma engine.
- * xor_blocks always uses the dest as a source so the ASYNC_TX_XOR_ZERO_DST
- * flag must be set to not include dest data in the calculation. The
- * assumption with dma eninges is that they only use the destination
- * buffer as a source when it is explicity specified in the source list.
* @dest: destination page
- * @src_list: array of source pages (if the dest is also a source it must be
- * at index zero). The contents of this array may be overwritten.
- * @offset: offset in pages to start transaction
+ * @src_list: array of source pages
+ * @offset: common src/dst offset to start transaction
* @src_cnt: number of source pages
* @len: length in bytes
- * @flags: ASYNC_TX_XOR_ZERO_DST, ASYNC_TX_XOR_DROP_DEST,
- * ASYNC_TX_ACK, ASYNC_TX_DEP_ACK
- * @depend_tx: xor depends on the result of this transaction.
- * @cb_fn: function to call when the xor completes
- * @cb_param: parameter to pass to the callback routine
+ * @submit: submission / completion modifiers
+ *
+ * honored flags: ASYNC_TX_ACK, ASYNC_TX_XOR_ZERO_DST, ASYNC_TX_XOR_DROP_DST
+ *
+ * xor_blocks always uses the dest as a source so the
+ * ASYNC_TX_XOR_ZERO_DST flag must be set to not include dest data in
+ * the calculation. The assumption with dma engines is that they only
+ * use the destination buffer as a source when it is explicitly specified
+ * in the source list.
+ *
+ * src_list note: if the dest is also a source it must be at index zero.
+ * The contents of this array will be overwritten if a scribble region
+ * is not specified.
*/
struct dma_async_tx_descriptor *
async_xor(struct page *dest, struct page **src_list, unsigned int offset,
- int src_cnt, size_t len, enum async_tx_flags flags,
- struct dma_async_tx_descriptor *depend_tx,
- dma_async_tx_callback cb_fn, void *cb_param)
+ int src_cnt, size_t len, struct async_submit_ctl *submit)
{
- struct dma_chan *chan = async_tx_find_channel(depend_tx, DMA_XOR,
+ struct dma_chan *chan = async_tx_find_channel(submit, DMA_XOR,
&dest, 1, src_list,
src_cnt, len);
+ struct dma_device *device = chan ? chan->device : NULL;
+ struct dmaengine_unmap_data *unmap = NULL;
+
BUG_ON(src_cnt <= 1);
- if (chan) {
+ if (device)
+ unmap = dmaengine_get_unmap_data(device->dev, src_cnt+1, GFP_NOIO);
+
+ if (unmap && is_dma_xor_aligned(device, offset, 0, len)) {
+ struct dma_async_tx_descriptor *tx;
+ int i, j;
+
/* run the xor asynchronously */
pr_debug("%s (async): len: %zu\n", __func__, len);
- return do_async_xor(chan, dest, src_list, offset, src_cnt, len,
- flags, depend_tx, cb_fn, cb_param);
+ unmap->len = len;
+ for (i = 0, j = 0; i < src_cnt; i++) {
+ if (!src_list[i])
+ continue;
+ unmap->to_cnt++;
+ unmap->addr[j++] = dma_map_page(device->dev, src_list[i],
+ offset, len, DMA_TO_DEVICE);
+ }
+
+ /* map it bidirectional as it may be re-used as a source */
+ unmap->addr[j] = dma_map_page(device->dev, dest, offset, len,
+ DMA_BIDIRECTIONAL);
+ unmap->bidi_cnt = 1;
+
+ tx = do_async_xor(chan, unmap, submit);
+ dmaengine_unmap_put(unmap);
+ return tx;
} else {
+ dmaengine_unmap_put(unmap);
/* run the xor synchronously */
pr_debug("%s (sync): len: %zu\n", __func__, len);
+ WARN_ONCE(chan, "%s: no space for dma address conversion\n",
+ __func__);
/* in the sync case the dest is an implied source
* (assumes the dest is the first source)
*/
- if (flags & ASYNC_TX_XOR_DROP_DST) {
+ if (submit->flags & ASYNC_TX_XOR_DROP_DST) {
src_cnt--;
src_list++;
}
/* wait for any prerequisite operations */
- async_tx_quiesce(&depend_tx);
+ async_tx_quiesce(&submit->depend_tx);
- do_sync_xor(dest, src_list, offset, src_cnt, len,
- flags, cb_fn, cb_param);
+ do_sync_xor(dest, src_list, offset, src_cnt, len, submit);
return NULL;
}
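
As used by the raid6test code later in this patch, a typical call zeroes the destination first so only the listed sources contribute to the xor. A hedged sketch with an illustrative helper name:

static struct dma_async_tx_descriptor *
xor_into(struct page *dest, struct page **srcs, int src_cnt, size_t bytes,
	 addr_conv_t *addr_conv)
{
	struct async_submit_ctl submit;

	/* zero 'dest' before the xor; 'srcs' survives thanks to the scribble */
	init_async_submit(&submit, ASYNC_TX_XOR_ZERO_DST, NULL, NULL, NULL,
			  addr_conv);
	return async_xor(dest, srcs, 0, src_cnt, bytes, &submit);
}
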
@@ -219,110 +236,110 @@ EXPORT_SYMBOL_GPL(async_xor);
static int page_is_zero(struct page *p, unsigned int offset, size_t len)
{
- char *a = page_address(p) + offset;
- return ((*(u32 *) a) == 0 &&
- memcmp(a, a + 4, len - 4) == 0);
+ return !memchr_inv(page_address(p) + offset, 0, len);
+}
+
+static inline struct dma_chan *
+xor_val_chan(struct async_submit_ctl *submit, struct page *dest,
+ struct page **src_list, int src_cnt, size_t len)
+{
+ #ifdef CONFIG_ASYNC_TX_DISABLE_XOR_VAL_DMA
+ return NULL;
+ #endif
+ return async_tx_find_channel(submit, DMA_XOR_VAL, &dest, 1, src_list,
+ src_cnt, len);
}
/**
- * async_xor_zero_sum - attempt a xor parity check with a dma engine.
+ * async_xor_val - attempt a xor parity check with a dma engine.
* @dest: destination page used if the xor is performed synchronously
- * @src_list: array of source pages. The dest page must be listed as a source
- * at index zero. The contents of this array may be overwritten.
+ * @src_list: array of source pages
* @offset: offset in pages to start transaction
* @src_cnt: number of source pages
* @len: length in bytes
* @result: 0 if sum == 0 else non-zero
- * @flags: ASYNC_TX_ACK, ASYNC_TX_DEP_ACK
- * @depend_tx: xor depends on the result of this transaction.
- * @cb_fn: function to call when the xor completes
- * @cb_param: parameter to pass to the callback routine
+ * @submit: submission / completion modifiers
+ *
+ * honored flags: ASYNC_TX_ACK
+ *
+ * src_list note: if the dest is also a source it must be at index zero.
+ * The contents of this array will be overwritten if a scribble region
+ * is not specified.
*/
struct dma_async_tx_descriptor *
-async_xor_zero_sum(struct page *dest, struct page **src_list,
- unsigned int offset, int src_cnt, size_t len,
- u32 *result, enum async_tx_flags flags,
- struct dma_async_tx_descriptor *depend_tx,
- dma_async_tx_callback cb_fn, void *cb_param)
+async_xor_val(struct page *dest, struct page **src_list, unsigned int offset,
+ int src_cnt, size_t len, enum sum_check_flags *result,
+ struct async_submit_ctl *submit)
{
- struct dma_chan *chan = async_tx_find_channel(depend_tx, DMA_ZERO_SUM,
- &dest, 1, src_list,
- src_cnt, len);
+ struct dma_chan *chan = xor_val_chan(submit, dest, src_list, src_cnt, len);
struct dma_device *device = chan ? chan->device : NULL;
struct dma_async_tx_descriptor *tx = NULL;
+ struct dmaengine_unmap_data *unmap = NULL;
BUG_ON(src_cnt <= 1);
- if (device && src_cnt <= device->max_xor) {
- dma_addr_t *dma_src = (dma_addr_t *) src_list;
- unsigned long dma_prep_flags = cb_fn ? DMA_PREP_INTERRUPT : 0;
+ if (device)
+ unmap = dmaengine_get_unmap_data(device->dev, src_cnt, GFP_NOIO);
+
+ if (unmap && src_cnt <= device->max_xor &&
+ is_dma_xor_aligned(device, offset, 0, len)) {
+ unsigned long dma_prep_flags = 0;
int i;
pr_debug("%s: (async) len: %zu\n", __func__, len);
- for (i = 0; i < src_cnt; i++)
- dma_src[i] = dma_map_page(device->dev, src_list[i],
- offset, len, DMA_TO_DEVICE);
+ if (submit->cb_fn)
+ dma_prep_flags |= DMA_PREP_INTERRUPT;
+ if (submit->flags & ASYNC_TX_FENCE)
+ dma_prep_flags |= DMA_PREP_FENCE;
+
+ for (i = 0; i < src_cnt; i++) {
+ unmap->addr[i] = dma_map_page(device->dev, src_list[i],
+ offset, len, DMA_TO_DEVICE);
+ unmap->to_cnt++;
+ }
+ unmap->len = len;
- tx = device->device_prep_dma_zero_sum(chan, dma_src, src_cnt,
- len, result,
- dma_prep_flags);
+ tx = device->device_prep_dma_xor_val(chan, unmap->addr, src_cnt,
+ len, result,
+ dma_prep_flags);
if (unlikely(!tx)) {
- async_tx_quiesce(&depend_tx);
+ async_tx_quiesce(&submit->depend_tx);
while (!tx) {
dma_async_issue_pending(chan);
- tx = device->device_prep_dma_zero_sum(chan,
- dma_src, src_cnt, len, result,
+ tx = device->device_prep_dma_xor_val(chan,
+ unmap->addr, src_cnt, len, result,
dma_prep_flags);
}
}
-
- async_tx_submit(chan, tx, flags, depend_tx, cb_fn, cb_param);
+ dma_set_unmap(tx, unmap);
+ async_tx_submit(chan, tx, submit);
} else {
- unsigned long xor_flags = flags;
+ enum async_tx_flags flags_orig = submit->flags;
pr_debug("%s: (sync) len: %zu\n", __func__, len);
+ WARN_ONCE(device && src_cnt <= device->max_xor,
+ "%s: no space for dma address conversion\n",
+ __func__);
- xor_flags |= ASYNC_TX_XOR_DROP_DST;
- xor_flags &= ~ASYNC_TX_ACK;
+ submit->flags |= ASYNC_TX_XOR_DROP_DST;
+ submit->flags &= ~ASYNC_TX_ACK;
- tx = async_xor(dest, src_list, offset, src_cnt, len, xor_flags,
- depend_tx, NULL, NULL);
+ tx = async_xor(dest, src_list, offset, src_cnt, len, submit);
async_tx_quiesce(&tx);
- *result = page_is_zero(dest, offset, len) ? 0 : 1;
+ *result = !page_is_zero(dest, offset, len) << SUM_CHECK_P;
- async_tx_sync_epilog(cb_fn, cb_param);
+ async_tx_sync_epilog(submit);
+ submit->flags = flags_orig;
}
+ dmaengine_unmap_put(unmap);
return tx;
}
-EXPORT_SYMBOL_GPL(async_xor_zero_sum);
-
-static int __init async_xor_init(void)
-{
- #ifdef CONFIG_DMA_ENGINE
- /* To conserve stack space the input src_list (array of page pointers)
- * is reused to hold the array of dma addresses passed to the driver.
- * This conversion is only possible when dma_addr_t is less than the
- * the size of a pointer. HIGHMEM64G is known to violate this
- * assumption.
- */
- BUILD_BUG_ON(sizeof(dma_addr_t) > sizeof(struct page *));
- #endif
-
- return 0;
-}
-
-static void __exit async_xor_exit(void)
-{
- do { } while (0);
-}
-
-module_init(async_xor_init);
-module_exit(async_xor_exit);
+EXPORT_SYMBOL_GPL(async_xor_val);
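
A caller sketch for the renamed validate operation (illustrative, assuming the SUM_CHECK_P_RESULT bit from async_tx.h): the result is now reported via enum sum_check_flags rather than a bare u32.

static int check_parity(struct page *dest, struct page **srcs, int src_cnt,
			size_t bytes, addr_conv_t *addr_conv)
{
	enum sum_check_flags result = 0;
	struct async_submit_ctl submit;
	struct dma_async_tx_descriptor *tx;

	init_async_submit(&submit, ASYNC_TX_ACK, NULL, NULL, NULL, addr_conv);
	tx = async_xor_val(dest, srcs, 0, src_cnt, bytes, &result, &submit);
	async_tx_quiesce(&tx);		/* wait for the check to finish */

	return (result & SUM_CHECK_P_RESULT) ? -EIO : 0;
}
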
MODULE_AUTHOR("Intel Corporation");
MODULE_DESCRIPTION("asynchronous xor/xor-zero-sum api");
diff --git a/crypto/async_tx/raid6test.c b/crypto/async_tx/raid6test.c
new file mode 100644
index 00000000000..dad95f45b88
--- /dev/null
+++ b/crypto/async_tx/raid6test.c
@@ -0,0 +1,253 @@
+/*
+ * asynchronous raid6 recovery self test
+ * Copyright (c) 2009, Intel Corporation.
+ *
+ * based on drivers/md/raid6test/test.c:
+ * Copyright 2002-2007 H. Peter Anvin
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ */
+#include <linux/async_tx.h>
+#include <linux/gfp.h>
+#include <linux/mm.h>
+#include <linux/random.h>
+#include <linux/module.h>
+
+#undef pr
+#define pr(fmt, args...) pr_info("raid6test: " fmt, ##args)
+
+#define NDISKS 64 /* Including P and Q */
+
+static struct page *dataptrs[NDISKS];
+static addr_conv_t addr_conv[NDISKS];
+static struct page *data[NDISKS+3];
+static struct page *spare;
+static struct page *recovi;
+static struct page *recovj;
+
+static void callback(void *param)
+{
+ struct completion *cmp = param;
+
+ complete(cmp);
+}
+
+static void makedata(int disks)
+{
+ int i;
+
+ for (i = 0; i < disks; i++) {
+ prandom_bytes(page_address(data[i]), PAGE_SIZE);
+ dataptrs[i] = data[i];
+ }
+}
+
+static char disk_type(int d, int disks)
+{
+ if (d == disks - 2)
+ return 'P';
+ else if (d == disks - 1)
+ return 'Q';
+ else
+ return 'D';
+}
+
+/* Recover two failed blocks. */
+static void raid6_dual_recov(int disks, size_t bytes, int faila, int failb, struct page **ptrs)
+{
+ struct async_submit_ctl submit;
+ struct completion cmp;
+ struct dma_async_tx_descriptor *tx = NULL;
+ enum sum_check_flags result = ~0;
+
+ if (faila > failb)
+ swap(faila, failb);
+
+ if (failb == disks-1) {
+ if (faila == disks-2) {
+ /* P+Q failure. Just rebuild the syndrome. */
+ init_async_submit(&submit, 0, NULL, NULL, NULL, addr_conv);
+ tx = async_gen_syndrome(ptrs, 0, disks, bytes, &submit);
+ } else {
+ struct page *blocks[disks];
+ struct page *dest;
+ int count = 0;
+ int i;
+
+ /* data+Q failure. Reconstruct data from P,
+ * then rebuild syndrome
+ */
+ for (i = disks; i-- ; ) {
+ if (i == faila || i == failb)
+ continue;
+ blocks[count++] = ptrs[i];
+ }
+ dest = ptrs[faila];
+ init_async_submit(&submit, ASYNC_TX_XOR_ZERO_DST, NULL,
+ NULL, NULL, addr_conv);
+ tx = async_xor(dest, blocks, 0, count, bytes, &submit);
+
+ init_async_submit(&submit, 0, tx, NULL, NULL, addr_conv);
+ tx = async_gen_syndrome(ptrs, 0, disks, bytes, &submit);
+ }
+ } else {
+ if (failb == disks-2) {
+ /* data+P failure. */
+ init_async_submit(&submit, 0, NULL, NULL, NULL, addr_conv);
+ tx = async_raid6_datap_recov(disks, bytes, faila, ptrs, &submit);
+ } else {
+ /* data+data failure. */
+ init_async_submit(&submit, 0, NULL, NULL, NULL, addr_conv);
+ tx = async_raid6_2data_recov(disks, bytes, faila, failb, ptrs, &submit);
+ }
+ }
+ init_completion(&cmp);
+ init_async_submit(&submit, ASYNC_TX_ACK, tx, callback, &cmp, addr_conv);
+ tx = async_syndrome_val(ptrs, 0, disks, bytes, &result, spare, &submit);
+ async_tx_issue_pending(tx);
+
+ if (wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000)) == 0)
+ pr("%s: timeout! (faila: %d failb: %d disks: %d)\n",
+ __func__, faila, failb, disks);
+
+ if (result != 0)
+ pr("%s: validation failure! faila: %d failb: %d sum_check_flags: %x\n",
+ __func__, faila, failb, result);
+}
+
+static int test_disks(int i, int j, int disks)
+{
+ int erra, errb;
+
+ memset(page_address(recovi), 0xf0, PAGE_SIZE);
+ memset(page_address(recovj), 0xba, PAGE_SIZE);
+
+ dataptrs[i] = recovi;
+ dataptrs[j] = recovj;
+
+ raid6_dual_recov(disks, PAGE_SIZE, i, j, dataptrs);
+
+ erra = memcmp(page_address(data[i]), page_address(recovi), PAGE_SIZE);
+ errb = memcmp(page_address(data[j]), page_address(recovj), PAGE_SIZE);
+
+ pr("%s(%d, %d): faila=%3d(%c) failb=%3d(%c) %s\n",
+ __func__, i, j, i, disk_type(i, disks), j, disk_type(j, disks),
+ (!erra && !errb) ? "OK" : !erra ? "ERRB" : !errb ? "ERRA" : "ERRAB");
+
+ dataptrs[i] = data[i];
+ dataptrs[j] = data[j];
+
+ return erra || errb;
+}
+
+static int test(int disks, int *tests)
+{
+ struct dma_async_tx_descriptor *tx;
+ struct async_submit_ctl submit;
+ struct completion cmp;
+ int err = 0;
+ int i, j;
+
+ recovi = data[disks];
+ recovj = data[disks+1];
+ spare = data[disks+2];
+
+ makedata(disks);
+
+ /* Nuke syndromes */
+ memset(page_address(data[disks-2]), 0xee, PAGE_SIZE);
+ memset(page_address(data[disks-1]), 0xee, PAGE_SIZE);
+
+ /* Generate assumed good syndrome */
+ init_completion(&cmp);
+ init_async_submit(&submit, ASYNC_TX_ACK, NULL, callback, &cmp, addr_conv);
+ tx = async_gen_syndrome(dataptrs, 0, disks, PAGE_SIZE, &submit);
+ async_tx_issue_pending(tx);
+
+ if (wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000)) == 0) {
+ pr("error: initial gen_syndrome(%d) timed out\n", disks);
+ return 1;
+ }
+
+ pr("testing the %d-disk case...\n", disks);
+ for (i = 0; i < disks-1; i++)
+ for (j = i+1; j < disks; j++) {
+ (*tests)++;
+ err += test_disks(i, j, disks);
+ }
+
+ return err;
+}
+
+
+static int raid6_test(void)
+{
+ int err = 0;
+ int tests = 0;
+ int i;
+
+ for (i = 0; i < NDISKS+3; i++) {
+ data[i] = alloc_page(GFP_KERNEL);
+ if (!data[i]) {
+ while (i--)
+ put_page(data[i]);
+ return -ENOMEM;
+ }
+ }
+
+ /* the 4-disk and 5-disk cases are special for the recovery code */
+ if (NDISKS > 4)
+ err += test(4, &tests);
+ if (NDISKS > 5)
+ err += test(5, &tests);
+ /* the 11 and 12 disk cases are special for ioatdma (p-disabled
+ * q-continuation without extended descriptor)
+ */
+ if (NDISKS > 12) {
+ err += test(11, &tests);
+ err += test(12, &tests);
+ }
+
+ /* the 24 disk case is special for ioatdma as it is the boundary point
+ * at which it needs to switch from 8-source ops to 16-source
+ * ops for continuation (assumes DMA_HAS_PQ_CONTINUE is not set)
+ */
+ if (NDISKS > 24)
+ err += test(24, &tests);
+
+ err += test(NDISKS, &tests);
+
+ pr("\n");
+ pr("complete (%d tests, %d failure%s)\n",
+ tests, err, err == 1 ? "" : "s");
+
+ for (i = 0; i < NDISKS+3; i++)
+ put_page(data[i]);
+
+ return 0;
+}
+
+static void raid6_test_exit(void)
+{
+}
+
+/* when compiled-in, wait for drivers to load first (assumes dma drivers
+ * are also compiled-in)
+ */
+late_initcall(raid6_test);
+module_exit(raid6_test_exit);
+MODULE_AUTHOR("Dan Williams <dan.j.williams@intel.com>");
+MODULE_DESCRIPTION("asynchronous RAID-6 recovery self tests");
+MODULE_LICENSE("GPL");
diff --git a/crypto/authenc.c b/crypto/authenc.c
index 5793b64c81a..e1223559d5d 100644
--- a/crypto/authenc.c
+++ b/crypto/authenc.c
@@ -23,52 +23,82 @@
#include <linux/slab.h>
#include <linux/spinlock.h>
+typedef u8 *(*authenc_ahash_t)(struct aead_request *req, unsigned int flags);
+
struct authenc_instance_ctx {
- struct crypto_spawn auth;
+ struct crypto_ahash_spawn auth;
struct crypto_skcipher_spawn enc;
};
struct crypto_authenc_ctx {
- spinlock_t auth_lock;
- struct crypto_hash *auth;
+ unsigned int reqoff;
+ struct crypto_ahash *auth;
struct crypto_ablkcipher *enc;
};
-static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
- unsigned int keylen)
+struct authenc_request_ctx {
+ unsigned int cryptlen;
+ struct scatterlist *sg;
+ struct scatterlist asg[2];
+ struct scatterlist cipher[2];
+ crypto_completion_t complete;
+ crypto_completion_t update_complete;
+ char tail[];
+};
+
+static void authenc_request_complete(struct aead_request *req, int err)
{
- unsigned int authkeylen;
- unsigned int enckeylen;
- struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
- struct crypto_hash *auth = ctx->auth;
- struct crypto_ablkcipher *enc = ctx->enc;
- struct rtattr *rta = (void *)key;
+ if (err != -EINPROGRESS)
+ aead_request_complete(req, err);
+}
+
+int crypto_authenc_extractkeys(struct crypto_authenc_keys *keys, const u8 *key,
+ unsigned int keylen)
+{
+ struct rtattr *rta = (struct rtattr *)key;
struct crypto_authenc_key_param *param;
- int err = -EINVAL;
if (!RTA_OK(rta, keylen))
- goto badkey;
+ return -EINVAL;
if (rta->rta_type != CRYPTO_AUTHENC_KEYA_PARAM)
- goto badkey;
+ return -EINVAL;
if (RTA_PAYLOAD(rta) < sizeof(*param))
- goto badkey;
+ return -EINVAL;
param = RTA_DATA(rta);
- enckeylen = be32_to_cpu(param->enckeylen);
+ keys->enckeylen = be32_to_cpu(param->enckeylen);
key += RTA_ALIGN(rta->rta_len);
keylen -= RTA_ALIGN(rta->rta_len);
- if (keylen < enckeylen)
- goto badkey;
+ if (keylen < keys->enckeylen)
+ return -EINVAL;
+
+ keys->authkeylen = keylen - keys->enckeylen;
+ keys->authkey = key;
+ keys->enckey = key + keys->authkeylen;
+
+ return 0;
+}
+EXPORT_SYMBOL_GPL(crypto_authenc_extractkeys);
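
For context, the blob that crypto_authenc_extractkeys() parses is the existing authenc setkey format: an rtattr of type CRYPTO_AUTHENC_KEYA_PARAM carrying the big-endian enckeylen, followed by the auth key and then the cipher key. A hedged sketch of a builder for that layout (helper name and return convention are illustrative):

#include <linux/rtnetlink.h>
#include <crypto/authenc.h>

static int build_authenc_key(u8 *buf, unsigned int buflen,
			     const u8 *authkey, unsigned int authkeylen,
			     const u8 *enckey, unsigned int enckeylen)
{
	struct rtattr *rta = (struct rtattr *)buf;
	struct crypto_authenc_key_param *param;
	unsigned int off = RTA_SPACE(sizeof(*param));

	if (buflen < off + authkeylen + enckeylen)
		return -EINVAL;

	rta->rta_type = CRYPTO_AUTHENC_KEYA_PARAM;
	rta->rta_len = RTA_LENGTH(sizeof(*param));
	param = RTA_DATA(rta);
	param->enckeylen = cpu_to_be32(enckeylen);

	memcpy(buf + off, authkey, authkeylen);
	memcpy(buf + off + authkeylen, enckey, enckeylen);

	return off + authkeylen + enckeylen;
}
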
+
+static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
+ unsigned int keylen)
+{
+ struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
+ struct crypto_ahash *auth = ctx->auth;
+ struct crypto_ablkcipher *enc = ctx->enc;
+ struct crypto_authenc_keys keys;
+ int err = -EINVAL;
- authkeylen = keylen - enckeylen;
+ if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
+ goto badkey;
- crypto_hash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
- crypto_hash_set_flags(auth, crypto_aead_get_flags(authenc) &
+ crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
+ crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_hash_setkey(auth, key, authkeylen);
- crypto_aead_set_flags(authenc, crypto_hash_get_flags(auth) &
+ err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
+ crypto_aead_set_flags(authenc, crypto_ahash_get_flags(auth) &
CRYPTO_TFM_RES_MASK);
if (err)
@@ -77,7 +107,7 @@ static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
crypto_ablkcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
crypto_ablkcipher_set_flags(enc, crypto_aead_get_flags(authenc) &
CRYPTO_TFM_REQ_MASK);
- err = crypto_ablkcipher_setkey(enc, key + authkeylen, enckeylen);
+ err = crypto_ablkcipher_setkey(enc, keys.enckey, keys.enckeylen);
crypto_aead_set_flags(authenc, crypto_ablkcipher_get_flags(enc) &
CRYPTO_TFM_RES_MASK);
@@ -89,54 +119,202 @@ badkey:
goto out;
}
-static void authenc_chain(struct scatterlist *head, struct scatterlist *sg,
- int chain)
+static void authenc_geniv_ahash_update_done(struct crypto_async_request *areq,
+ int err)
{
- if (chain) {
- head->length += sg->length;
- sg = scatterwalk_sg_next(sg);
- }
+ struct aead_request *req = areq->data;
+ struct crypto_aead *authenc = crypto_aead_reqtfm(req);
+ struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
+ struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
- if (sg)
- scatterwalk_sg_chain(head, 2, sg);
- else
- sg_mark_end(head);
+ if (err)
+ goto out;
+
+ ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result,
+ areq_ctx->cryptlen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) &
+ CRYPTO_TFM_REQ_MAY_SLEEP,
+ areq_ctx->complete, req);
+
+ err = crypto_ahash_finup(ahreq);
+ if (err)
+ goto out;
+
+ scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg,
+ areq_ctx->cryptlen,
+ crypto_aead_authsize(authenc), 1);
+
+out:
+ authenc_request_complete(req, err);
+}
+
+static void authenc_geniv_ahash_done(struct crypto_async_request *areq, int err)
+{
+ struct aead_request *req = areq->data;
+ struct crypto_aead *authenc = crypto_aead_reqtfm(req);
+ struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
+ struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+
+ if (err)
+ goto out;
+
+ scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg,
+ areq_ctx->cryptlen,
+ crypto_aead_authsize(authenc), 1);
+
+out:
+ aead_request_complete(req, err);
}
-static u8 *crypto_authenc_hash(struct aead_request *req, unsigned int flags,
- struct scatterlist *cipher,
- unsigned int cryptlen)
+static void authenc_verify_ahash_update_done(struct crypto_async_request *areq,
+ int err)
{
+ u8 *ihash;
+ unsigned int authsize;
+ struct ablkcipher_request *abreq;
+ struct aead_request *req = areq->data;
struct crypto_aead *authenc = crypto_aead_reqtfm(req);
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
- struct crypto_hash *auth = ctx->auth;
- struct hash_desc desc = {
- .tfm = auth,
- .flags = aead_request_flags(req) & flags,
- };
- u8 *hash = aead_request_ctx(req);
+ struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+ unsigned int cryptlen = req->cryptlen;
+
+ if (err)
+ goto out;
+
+ ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result,
+ areq_ctx->cryptlen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) &
+ CRYPTO_TFM_REQ_MAY_SLEEP,
+ areq_ctx->complete, req);
+
+ err = crypto_ahash_finup(ahreq);
+ if (err)
+ goto out;
+
+ authsize = crypto_aead_authsize(authenc);
+ cryptlen -= authsize;
+ ihash = ahreq->result + authsize;
+ scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
+ authsize, 0);
+
+ err = crypto_memneq(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+ if (err)
+ goto out;
+
+ abreq = aead_request_ctx(req);
+ ablkcipher_request_set_tfm(abreq, ctx->enc);
+ ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+ req->base.complete, req->base.data);
+ ablkcipher_request_set_crypt(abreq, req->src, req->dst,
+ cryptlen, req->iv);
+
+ err = crypto_ablkcipher_decrypt(abreq);
+
+out:
+ authenc_request_complete(req, err);
+}
+
+static void authenc_verify_ahash_done(struct crypto_async_request *areq,
+ int err)
+{
+ u8 *ihash;
+ unsigned int authsize;
+ struct ablkcipher_request *abreq;
+ struct aead_request *req = areq->data;
+ struct crypto_aead *authenc = crypto_aead_reqtfm(req);
+ struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
+ struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+ unsigned int cryptlen = req->cryptlen;
+
+ if (err)
+ goto out;
+
+ authsize = crypto_aead_authsize(authenc);
+ cryptlen -= authsize;
+ ihash = ahreq->result + authsize;
+ scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
+ authsize, 0);
+
+ err = crypto_memneq(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+ if (err)
+ goto out;
+
+ abreq = aead_request_ctx(req);
+ ablkcipher_request_set_tfm(abreq, ctx->enc);
+ ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+ req->base.complete, req->base.data);
+ ablkcipher_request_set_crypt(abreq, req->src, req->dst,
+ cryptlen, req->iv);
+
+ err = crypto_ablkcipher_decrypt(abreq);
+
+out:
+ authenc_request_complete(req, err);
+}
+
+static u8 *crypto_authenc_ahash_fb(struct aead_request *req, unsigned int flags)
+{
+ struct crypto_aead *authenc = crypto_aead_reqtfm(req);
+ struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
+ struct crypto_ahash *auth = ctx->auth;
+ struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+ u8 *hash = areq_ctx->tail;
int err;
- hash = (u8 *)ALIGN((unsigned long)hash + crypto_hash_alignmask(auth),
- crypto_hash_alignmask(auth) + 1);
+ hash = (u8 *)ALIGN((unsigned long)hash + crypto_ahash_alignmask(auth),
+ crypto_ahash_alignmask(auth) + 1);
+
+ ahash_request_set_tfm(ahreq, auth);
- spin_lock_bh(&ctx->auth_lock);
- err = crypto_hash_init(&desc);
+ err = crypto_ahash_init(ahreq);
if (err)
- goto auth_unlock;
+ return ERR_PTR(err);
- err = crypto_hash_update(&desc, req->assoc, req->assoclen);
+ ahash_request_set_crypt(ahreq, req->assoc, hash, req->assoclen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
+ areq_ctx->update_complete, req);
+
+ err = crypto_ahash_update(ahreq);
if (err)
- goto auth_unlock;
+ return ERR_PTR(err);
- err = crypto_hash_update(&desc, cipher, cryptlen);
+ ahash_request_set_crypt(ahreq, areq_ctx->sg, hash,
+ areq_ctx->cryptlen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
+ areq_ctx->complete, req);
+
+ err = crypto_ahash_finup(ahreq);
if (err)
- goto auth_unlock;
+ return ERR_PTR(err);
+
+ return hash;
+}
- err = crypto_hash_final(&desc, hash);
-auth_unlock:
- spin_unlock_bh(&ctx->auth_lock);
+static u8 *crypto_authenc_ahash(struct aead_request *req, unsigned int flags)
+{
+ struct crypto_aead *authenc = crypto_aead_reqtfm(req);
+ struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
+ struct crypto_ahash *auth = ctx->auth;
+ struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+ u8 *hash = areq_ctx->tail;
+ int err;
+ hash = (u8 *)ALIGN((unsigned long)hash + crypto_ahash_alignmask(auth),
+ crypto_ahash_alignmask(auth) + 1);
+
+ ahash_request_set_tfm(ahreq, auth);
+ ahash_request_set_crypt(ahreq, areq_ctx->sg, hash,
+ areq_ctx->cryptlen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
+ areq_ctx->complete, req);
+
+ err = crypto_ahash_digest(ahreq);
if (err)
return ERR_PTR(err);
@@ -147,11 +325,15 @@ static int crypto_authenc_genicv(struct aead_request *req, u8 *iv,
unsigned int flags)
{
struct crypto_aead *authenc = crypto_aead_reqtfm(req);
+ struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
struct scatterlist *dst = req->dst;
- struct scatterlist cipher[2];
- struct page *dstp;
+ struct scatterlist *assoc = req->assoc;
+ struct scatterlist *cipher = areq_ctx->cipher;
+ struct scatterlist *asg = areq_ctx->asg;
unsigned int ivsize = crypto_aead_ivsize(authenc);
- unsigned int cryptlen;
+ unsigned int cryptlen = req->cryptlen;
+ authenc_ahash_t authenc_ahash_fn = crypto_authenc_ahash_fb;
+ struct page *dstp;
u8 *vdst;
u8 *hash;
@@ -161,12 +343,27 @@ static int crypto_authenc_genicv(struct aead_request *req, u8 *iv,
if (ivsize) {
sg_init_table(cipher, 2);
sg_set_buf(cipher, iv, ivsize);
- authenc_chain(cipher, dst, vdst == iv + ivsize);
+ scatterwalk_crypto_chain(cipher, dst, vdst == iv + ivsize, 2);
dst = cipher;
+ cryptlen += ivsize;
+ }
+
+ if (req->assoclen && sg_is_last(assoc)) {
+ authenc_ahash_fn = crypto_authenc_ahash;
+ sg_init_table(asg, 2);
+ sg_set_page(asg, sg_page(assoc), assoc->length, assoc->offset);
+ scatterwalk_crypto_chain(asg, dst, 0, 2);
+ dst = asg;
+ cryptlen += req->assoclen;
}
- cryptlen = req->cryptlen + ivsize;
- hash = crypto_authenc_hash(req, flags, dst, cryptlen);
+ areq_ctx->cryptlen = cryptlen;
+ areq_ctx->sg = dst;
+
+ areq_ctx->complete = authenc_geniv_ahash_done;
+ areq_ctx->update_complete = authenc_geniv_ahash_update_done;
+
+ hash = authenc_ahash_fn(req, flags);
if (IS_ERR(hash))
return PTR_ERR(hash);
@@ -183,25 +380,28 @@ static void crypto_authenc_encrypt_done(struct crypto_async_request *req,
if (!err) {
struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
- struct ablkcipher_request *abreq = aead_request_ctx(areq);
- u8 *iv = (u8 *)(abreq + 1) +
- crypto_ablkcipher_reqsize(ctx->enc);
+ struct authenc_request_ctx *areq_ctx = aead_request_ctx(areq);
+ struct ablkcipher_request *abreq = (void *)(areq_ctx->tail
+ + ctx->reqoff);
+ u8 *iv = (u8 *)abreq - crypto_ablkcipher_ivsize(ctx->enc);
err = crypto_authenc_genicv(areq, iv, 0);
}
- aead_request_complete(areq, err);
+ authenc_request_complete(areq, err);
}
static int crypto_authenc_encrypt(struct aead_request *req)
{
struct crypto_aead *authenc = crypto_aead_reqtfm(req);
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
- struct ablkcipher_request *abreq = aead_request_ctx(req);
+ struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
struct crypto_ablkcipher *enc = ctx->enc;
struct scatterlist *dst = req->dst;
unsigned int cryptlen = req->cryptlen;
- u8 *iv = (u8 *)(abreq + 1) + crypto_ablkcipher_reqsize(enc);
+ struct ablkcipher_request *abreq = (void *)(areq_ctx->tail
+ + ctx->reqoff);
+ u8 *iv = (u8 *)abreq - crypto_ablkcipher_ivsize(enc);
int err;
ablkcipher_request_set_tfm(abreq, enc);
@@ -229,7 +429,7 @@ static void crypto_authenc_givencrypt_done(struct crypto_async_request *req,
err = crypto_authenc_genicv(areq, greq->giv, 0);
}
- aead_request_complete(areq, err);
+ authenc_request_complete(areq, err);
}
static int crypto_authenc_givencrypt(struct aead_givcrypt_request *req)
@@ -256,33 +456,40 @@ static int crypto_authenc_givencrypt(struct aead_givcrypt_request *req)
}
static int crypto_authenc_verify(struct aead_request *req,
- struct scatterlist *cipher,
- unsigned int cryptlen)
+ authenc_ahash_t authenc_ahash_fn)
{
struct crypto_aead *authenc = crypto_aead_reqtfm(req);
+ struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
u8 *ohash;
u8 *ihash;
unsigned int authsize;
- ohash = crypto_authenc_hash(req, CRYPTO_TFM_REQ_MAY_SLEEP, cipher,
- cryptlen);
+ areq_ctx->complete = authenc_verify_ahash_done;
+ areq_ctx->update_complete = authenc_verify_ahash_update_done;
+
+ ohash = authenc_ahash_fn(req, CRYPTO_TFM_REQ_MAY_SLEEP);
if (IS_ERR(ohash))
return PTR_ERR(ohash);
authsize = crypto_aead_authsize(authenc);
ihash = ohash + authsize;
- scatterwalk_map_and_copy(ihash, cipher, cryptlen, authsize, 0);
- return memcmp(ihash, ohash, authsize) ? -EBADMSG: 0;
+ scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
+ authsize, 0);
+ return crypto_memneq(ihash, ohash, authsize) ? -EBADMSG : 0;
}
static int crypto_authenc_iverify(struct aead_request *req, u8 *iv,
unsigned int cryptlen)
{
struct crypto_aead *authenc = crypto_aead_reqtfm(req);
+ struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
struct scatterlist *src = req->src;
- struct scatterlist cipher[2];
- struct page *srcp;
+ struct scatterlist *assoc = req->assoc;
+ struct scatterlist *cipher = areq_ctx->cipher;
+ struct scatterlist *asg = areq_ctx->asg;
unsigned int ivsize = crypto_aead_ivsize(authenc);
+ authenc_ahash_t authenc_ahash_fn = crypto_authenc_ahash_fb;
+ struct page *srcp;
u8 *vsrc;
srcp = sg_page(src);
@@ -291,11 +498,24 @@ static int crypto_authenc_iverify(struct aead_request *req, u8 *iv,
if (ivsize) {
sg_init_table(cipher, 2);
sg_set_buf(cipher, iv, ivsize);
- authenc_chain(cipher, src, vsrc == iv + ivsize);
+ scatterwalk_crypto_chain(cipher, src, vsrc == iv + ivsize, 2);
src = cipher;
+ cryptlen += ivsize;
+ }
+
+ if (req->assoclen && sg_is_last(assoc)) {
+ authenc_ahash_fn = crypto_authenc_ahash;
+ sg_init_table(asg, 2);
+ sg_set_page(asg, sg_page(assoc), assoc->length, assoc->offset);
+ scatterwalk_crypto_chain(asg, src, 0, 2);
+ src = asg;
+ cryptlen += req->assoclen;
}
- return crypto_authenc_verify(req, src, cryptlen + ivsize);
+ areq_ctx->cryptlen = cryptlen;
+ areq_ctx->sg = src;
+
+ return crypto_authenc_verify(req, authenc_ahash_fn);
}
static int crypto_authenc_decrypt(struct aead_request *req)
@@ -326,38 +546,42 @@ static int crypto_authenc_decrypt(struct aead_request *req)
static int crypto_authenc_init_tfm(struct crypto_tfm *tfm)
{
- struct crypto_instance *inst = (void *)tfm->__crt_alg;
+ struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
struct authenc_instance_ctx *ictx = crypto_instance_ctx(inst);
struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm);
- struct crypto_hash *auth;
+ struct crypto_ahash *auth;
struct crypto_ablkcipher *enc;
int err;
- auth = crypto_spawn_hash(&ictx->auth);
+ auth = crypto_spawn_ahash(&ictx->auth);
if (IS_ERR(auth))
return PTR_ERR(auth);
enc = crypto_spawn_skcipher(&ictx->enc);
err = PTR_ERR(enc);
if (IS_ERR(enc))
- goto err_free_hash;
+ goto err_free_ahash;
ctx->auth = auth;
ctx->enc = enc;
- tfm->crt_aead.reqsize = max_t(unsigned int,
- (crypto_hash_alignmask(auth) &
- ~(crypto_tfm_ctx_alignment() - 1)) +
- crypto_hash_digestsize(auth) * 2,
- sizeof(struct skcipher_givcrypt_request) +
- crypto_ablkcipher_reqsize(enc) +
- crypto_ablkcipher_ivsize(enc));
- spin_lock_init(&ctx->auth_lock);
+ ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth) +
+ crypto_ahash_alignmask(auth),
+ crypto_ahash_alignmask(auth) + 1) +
+ crypto_ablkcipher_ivsize(enc);
+
+ tfm->crt_aead.reqsize = sizeof(struct authenc_request_ctx) +
+ ctx->reqoff +
+ max_t(unsigned int,
+ crypto_ahash_reqsize(auth) +
+ sizeof(struct ahash_request),
+ sizeof(struct skcipher_givcrypt_request) +
+ crypto_ablkcipher_reqsize(enc));
return 0;
-err_free_hash:
- crypto_free_hash(auth);
+err_free_ahash:
+ crypto_free_ahash(auth);
return err;
}
@@ -365,7 +589,7 @@ static void crypto_authenc_exit_tfm(struct crypto_tfm *tfm)
{
struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm);
- crypto_free_hash(ctx->auth);
+ crypto_free_ahash(ctx->auth);
crypto_free_ablkcipher(ctx->enc);
}
@@ -373,24 +597,26 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
{
struct crypto_attr_type *algt;
struct crypto_instance *inst;
- struct crypto_alg *auth;
+ struct hash_alg_common *auth;
+ struct crypto_alg *auth_base;
struct crypto_alg *enc;
struct authenc_instance_ctx *ctx;
const char *enc_name;
int err;
algt = crypto_get_attr_type(tb);
- err = PTR_ERR(algt);
if (IS_ERR(algt))
- return ERR_PTR(err);
+ return ERR_CAST(algt);
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return ERR_PTR(-EINVAL);
- auth = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
- CRYPTO_ALG_TYPE_HASH_MASK);
+ auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
+ CRYPTO_ALG_TYPE_AHASH_MASK);
if (IS_ERR(auth))
- return ERR_PTR(PTR_ERR(auth));
+ return ERR_CAST(auth);
+
+ auth_base = &auth->base;
enc_name = crypto_attr_alg_name(tb[2]);
err = PTR_ERR(enc_name);
@@ -404,7 +630,7 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
ctx = crypto_instance_ctx(inst);
- err = crypto_init_spawn(&ctx->auth, auth, inst, CRYPTO_ALG_TYPE_MASK);
+ err = crypto_init_ahash_spawn(&ctx->auth, auth, inst);
if (err)
goto err_free_inst;
@@ -419,28 +645,25 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
err = -ENAMETOOLONG;
if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
- "authenc(%s,%s)", auth->cra_name, enc->cra_name) >=
+ "authenc(%s,%s)", auth_base->cra_name, enc->cra_name) >=
CRYPTO_MAX_ALG_NAME)
goto err_drop_enc;
if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
- "authenc(%s,%s)", auth->cra_driver_name,
+ "authenc(%s,%s)", auth_base->cra_driver_name,
enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
goto err_drop_enc;
inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
inst->alg.cra_flags |= enc->cra_flags & CRYPTO_ALG_ASYNC;
- inst->alg.cra_priority = enc->cra_priority * 10 + auth->cra_priority;
+ inst->alg.cra_priority = enc->cra_priority *
+ 10 + auth_base->cra_priority;
inst->alg.cra_blocksize = enc->cra_blocksize;
- inst->alg.cra_alignmask = auth->cra_alignmask | enc->cra_alignmask;
+ inst->alg.cra_alignmask = auth_base->cra_alignmask | enc->cra_alignmask;
inst->alg.cra_type = &crypto_aead_type;
inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize;
- inst->alg.cra_aead.maxauthsize = auth->cra_type == &crypto_hash_type ?
- auth->cra_hash.digestsize :
- auth->cra_type ?
- __crypto_shash_alg(auth)->digestsize :
- auth->cra_digest.dia_digestsize;
+ inst->alg.cra_aead.maxauthsize = auth->digestsize;
inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx);
@@ -453,13 +676,13 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
inst->alg.cra_aead.givencrypt = crypto_authenc_givencrypt;
out:
- crypto_mod_put(auth);
+ crypto_mod_put(auth_base);
return inst;
err_drop_enc:
crypto_drop_skcipher(&ctx->enc);
err_drop_auth:
- crypto_drop_spawn(&ctx->auth);
+ crypto_drop_ahash(&ctx->auth);
err_free_inst:
kfree(inst);
out_put_auth:
@@ -472,7 +695,7 @@ static void crypto_authenc_free(struct crypto_instance *inst)
struct authenc_instance_ctx *ctx = crypto_instance_ctx(inst);
crypto_drop_skcipher(&ctx->enc);
- crypto_drop_spawn(&ctx->auth);
+ crypto_drop_ahash(&ctx->auth);
kfree(inst);
}
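
A side note on the verification hunks in this patch: the ICV checks now compare the computed and received authenticator with crypto_memneq() instead of memcmp(), so the comparison time no longer depends on how many leading bytes happen to match. As a rough sketch of the underlying idea only (not the kernel's actual crypto_memneq(), which additionally guards against compiler optimisations), a constant-time inequality test accumulates every byte difference before deciding; the helper name icv_neq_sketch and the use of <linux/types.h> types are illustrative assumptions, not part of this patch:

	/* Sketch only: returns non-zero iff a and b differ, touching every
	 * byte regardless of where the first mismatch occurs. */
	static inline int icv_neq_sketch(const u8 *a, const u8 *b, size_t len)
	{
		u8 diff = 0;

		while (len--)
			diff |= *a++ ^ *b++;

		return diff;
	}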
diff --git a/crypto/authencesn.c b/crypto/authencesn.c
new file mode 100644
index 00000000000..4be0dd4373a
--- /dev/null
+++ b/crypto/authencesn.c
@@ -0,0 +1,816 @@
+/*
+ * authencesn.c - AEAD wrapper for IPsec with extended sequence numbers,
+ * derived from authenc.c
+ *
+ * Copyright (C) 2010 secunet Security Networks AG
+ * Copyright (C) 2010 Steffen Klassert <steffen.klassert@secunet.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/aead.h>
+#include <crypto/internal/hash.h>
+#include <crypto/internal/skcipher.h>
+#include <crypto/authenc.h>
+#include <crypto/scatterwalk.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/rtnetlink.h>
+#include <linux/slab.h>
+#include <linux/spinlock.h>
+
+struct authenc_esn_instance_ctx {
+ struct crypto_ahash_spawn auth;
+ struct crypto_skcipher_spawn enc;
+};
+
+struct crypto_authenc_esn_ctx {
+ unsigned int reqoff;
+ struct crypto_ahash *auth;
+ struct crypto_ablkcipher *enc;
+};
+
+struct authenc_esn_request_ctx {
+ unsigned int cryptlen;
+ unsigned int headlen;
+ unsigned int trailen;
+ struct scatterlist *sg;
+ struct scatterlist hsg[2];
+ struct scatterlist tsg[1];
+ struct scatterlist cipher[2];
+ crypto_completion_t complete;
+ crypto_completion_t update_complete;
+ crypto_completion_t update_complete2;
+ char tail[];
+};
+
+static void authenc_esn_request_complete(struct aead_request *req, int err)
+{
+ if (err != -EINPROGRESS)
+ aead_request_complete(req, err);
+}
+
+static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *key,
+ unsigned int keylen)
+{
+ struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+ struct crypto_ahash *auth = ctx->auth;
+ struct crypto_ablkcipher *enc = ctx->enc;
+ struct crypto_authenc_keys keys;
+ int err = -EINVAL;
+
+ if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
+ goto badkey;
+
+ crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
+ crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc_esn) &
+ CRYPTO_TFM_REQ_MASK);
+ err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
+ crypto_aead_set_flags(authenc_esn, crypto_ahash_get_flags(auth) &
+ CRYPTO_TFM_RES_MASK);
+
+ if (err)
+ goto out;
+
+ crypto_ablkcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
+ crypto_ablkcipher_set_flags(enc, crypto_aead_get_flags(authenc_esn) &
+ CRYPTO_TFM_REQ_MASK);
+ err = crypto_ablkcipher_setkey(enc, keys.enckey, keys.enckeylen);
+ crypto_aead_set_flags(authenc_esn, crypto_ablkcipher_get_flags(enc) &
+ CRYPTO_TFM_RES_MASK);
+
+out:
+ return err;
+
+badkey:
+ crypto_aead_set_flags(authenc_esn, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ goto out;
+}
+
+static void authenc_esn_geniv_ahash_update_done(struct crypto_async_request *areq,
+ int err)
+{
+ struct aead_request *req = areq->data;
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+ struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+ struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+
+ if (err)
+ goto out;
+
+ ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result,
+ areq_ctx->cryptlen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) &
+ CRYPTO_TFM_REQ_MAY_SLEEP,
+ areq_ctx->update_complete2, req);
+
+ err = crypto_ahash_update(ahreq);
+ if (err)
+ goto out;
+
+ ahash_request_set_crypt(ahreq, areq_ctx->tsg, ahreq->result,
+ areq_ctx->trailen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) &
+ CRYPTO_TFM_REQ_MAY_SLEEP,
+ areq_ctx->complete, req);
+
+ err = crypto_ahash_finup(ahreq);
+ if (err)
+ goto out;
+
+ scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg,
+ areq_ctx->cryptlen,
+ crypto_aead_authsize(authenc_esn), 1);
+
+out:
+ authenc_esn_request_complete(req, err);
+}
+
+static void authenc_esn_geniv_ahash_update_done2(struct crypto_async_request *areq,
+ int err)
+{
+ struct aead_request *req = areq->data;
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+ struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+ struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+
+ if (err)
+ goto out;
+
+ ahash_request_set_crypt(ahreq, areq_ctx->tsg, ahreq->result,
+ areq_ctx->trailen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) &
+ CRYPTO_TFM_REQ_MAY_SLEEP,
+ areq_ctx->complete, req);
+
+ err = crypto_ahash_finup(ahreq);
+ if (err)
+ goto out;
+
+ scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg,
+ areq_ctx->cryptlen,
+ crypto_aead_authsize(authenc_esn), 1);
+
+out:
+ authenc_esn_request_complete(req, err);
+}
+
+
+static void authenc_esn_geniv_ahash_done(struct crypto_async_request *areq,
+ int err)
+{
+ struct aead_request *req = areq->data;
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+ struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+ struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+
+ if (err)
+ goto out;
+
+ scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg,
+ areq_ctx->cryptlen,
+ crypto_aead_authsize(authenc_esn), 1);
+
+out:
+ aead_request_complete(req, err);
+}
+
+
+static void authenc_esn_verify_ahash_update_done(struct crypto_async_request *areq,
+ int err)
+{
+ u8 *ihash;
+ unsigned int authsize;
+ struct ablkcipher_request *abreq;
+ struct aead_request *req = areq->data;
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+ struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+ struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+ unsigned int cryptlen = req->cryptlen;
+
+ if (err)
+ goto out;
+
+ ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result,
+ areq_ctx->cryptlen);
+
+ ahash_request_set_callback(ahreq,
+ aead_request_flags(req) &
+ CRYPTO_TFM_REQ_MAY_SLEEP,
+ areq_ctx->update_complete2, req);
+
+ err = crypto_ahash_update(ahreq);
+ if (err)
+ goto out;
+
+ ahash_request_set_crypt(ahreq, areq_ctx->tsg, ahreq->result,
+ areq_ctx->trailen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) &
+ CRYPTO_TFM_REQ_MAY_SLEEP,
+ areq_ctx->complete, req);
+
+ err = crypto_ahash_finup(ahreq);
+ if (err)
+ goto out;
+
+ authsize = crypto_aead_authsize(authenc_esn);
+ cryptlen -= authsize;
+ ihash = ahreq->result + authsize;
+ scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
+ authsize, 0);
+
+ err = crypto_memneq(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+ if (err)
+ goto out;
+
+ abreq = aead_request_ctx(req);
+ ablkcipher_request_set_tfm(abreq, ctx->enc);
+ ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+ req->base.complete, req->base.data);
+ ablkcipher_request_set_crypt(abreq, req->src, req->dst,
+ cryptlen, req->iv);
+
+ err = crypto_ablkcipher_decrypt(abreq);
+
+out:
+ authenc_esn_request_complete(req, err);
+}
+
+static void authenc_esn_verify_ahash_update_done2(struct crypto_async_request *areq,
+ int err)
+{
+ u8 *ihash;
+ unsigned int authsize;
+ struct ablkcipher_request *abreq;
+ struct aead_request *req = areq->data;
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+ struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+ struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+ unsigned int cryptlen = req->cryptlen;
+
+ if (err)
+ goto out;
+
+ ahash_request_set_crypt(ahreq, areq_ctx->tsg, ahreq->result,
+ areq_ctx->trailen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) &
+ CRYPTO_TFM_REQ_MAY_SLEEP,
+ areq_ctx->complete, req);
+
+ err = crypto_ahash_finup(ahreq);
+ if (err)
+ goto out;
+
+ authsize = crypto_aead_authsize(authenc_esn);
+ cryptlen -= authsize;
+ ihash = ahreq->result + authsize;
+ scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
+ authsize, 0);
+
+ err = crypto_memneq(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+ if (err)
+ goto out;
+
+ abreq = aead_request_ctx(req);
+ ablkcipher_request_set_tfm(abreq, ctx->enc);
+ ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+ req->base.complete, req->base.data);
+ ablkcipher_request_set_crypt(abreq, req->src, req->dst,
+ cryptlen, req->iv);
+
+ err = crypto_ablkcipher_decrypt(abreq);
+
+out:
+ authenc_esn_request_complete(req, err);
+}
+
+
+static void authenc_esn_verify_ahash_done(struct crypto_async_request *areq,
+ int err)
+{
+ u8 *ihash;
+ unsigned int authsize;
+ struct ablkcipher_request *abreq;
+ struct aead_request *req = areq->data;
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+ struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+ struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+ unsigned int cryptlen = req->cryptlen;
+
+ if (err)
+ goto out;
+
+ authsize = crypto_aead_authsize(authenc_esn);
+ cryptlen -= authsize;
+ ihash = ahreq->result + authsize;
+ scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
+ authsize, 0);
+
+ err = crypto_memneq(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+ if (err)
+ goto out;
+
+ abreq = aead_request_ctx(req);
+ ablkcipher_request_set_tfm(abreq, ctx->enc);
+ ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+ req->base.complete, req->base.data);
+ ablkcipher_request_set_crypt(abreq, req->src, req->dst,
+ cryptlen, req->iv);
+
+ err = crypto_ablkcipher_decrypt(abreq);
+
+out:
+ authenc_esn_request_complete(req, err);
+}
+
+static u8 *crypto_authenc_esn_ahash(struct aead_request *req,
+ unsigned int flags)
+{
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+ struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+ struct crypto_ahash *auth = ctx->auth;
+ struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+ u8 *hash = areq_ctx->tail;
+ int err;
+
+ hash = (u8 *)ALIGN((unsigned long)hash + crypto_ahash_alignmask(auth),
+ crypto_ahash_alignmask(auth) + 1);
+
+ ahash_request_set_tfm(ahreq, auth);
+
+ err = crypto_ahash_init(ahreq);
+ if (err)
+ return ERR_PTR(err);
+
+ ahash_request_set_crypt(ahreq, areq_ctx->hsg, hash, areq_ctx->headlen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
+ areq_ctx->update_complete, req);
+
+ err = crypto_ahash_update(ahreq);
+ if (err)
+ return ERR_PTR(err);
+
+ ahash_request_set_crypt(ahreq, areq_ctx->sg, hash, areq_ctx->cryptlen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
+ areq_ctx->update_complete2, req);
+
+ err = crypto_ahash_update(ahreq);
+ if (err)
+ return ERR_PTR(err);
+
+ ahash_request_set_crypt(ahreq, areq_ctx->tsg, hash,
+ areq_ctx->trailen);
+ ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
+ areq_ctx->complete, req);
+
+ err = crypto_ahash_finup(ahreq);
+ if (err)
+ return ERR_PTR(err);
+
+ return hash;
+}
+
+static int crypto_authenc_esn_genicv(struct aead_request *req, u8 *iv,
+ unsigned int flags)
+{
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+ struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct scatterlist *dst = req->dst;
+ struct scatterlist *assoc = req->assoc;
+ struct scatterlist *cipher = areq_ctx->cipher;
+ struct scatterlist *hsg = areq_ctx->hsg;
+ struct scatterlist *tsg = areq_ctx->tsg;
+ struct scatterlist *assoc1;
+ struct scatterlist *assoc2;
+ unsigned int ivsize = crypto_aead_ivsize(authenc_esn);
+ unsigned int cryptlen = req->cryptlen;
+ struct page *dstp;
+ u8 *vdst;
+ u8 *hash;
+
+ dstp = sg_page(dst);
+ vdst = PageHighMem(dstp) ? NULL : page_address(dstp) + dst->offset;
+
+ if (ivsize) {
+ sg_init_table(cipher, 2);
+ sg_set_buf(cipher, iv, ivsize);
+ scatterwalk_crypto_chain(cipher, dst, vdst == iv + ivsize, 2);
+ dst = cipher;
+ cryptlen += ivsize;
+ }
+
+ if (sg_is_last(assoc))
+ return -EINVAL;
+
+ assoc1 = assoc + 1;
+ if (sg_is_last(assoc1))
+ return -EINVAL;
+
+ assoc2 = assoc + 2;
+ if (!sg_is_last(assoc2))
+ return -EINVAL;
+
+ sg_init_table(hsg, 2);
+ sg_set_page(hsg, sg_page(assoc), assoc->length, assoc->offset);
+ sg_set_page(hsg + 1, sg_page(assoc2), assoc2->length, assoc2->offset);
+
+ sg_init_table(tsg, 1);
+ sg_set_page(tsg, sg_page(assoc1), assoc1->length, assoc1->offset);
+
+ areq_ctx->cryptlen = cryptlen;
+ areq_ctx->headlen = assoc->length + assoc2->length;
+ areq_ctx->trailen = assoc1->length;
+ areq_ctx->sg = dst;
+
+ areq_ctx->complete = authenc_esn_geniv_ahash_done;
+ areq_ctx->update_complete = authenc_esn_geniv_ahash_update_done;
+ areq_ctx->update_complete2 = authenc_esn_geniv_ahash_update_done2;
+
+ hash = crypto_authenc_esn_ahash(req, flags);
+ if (IS_ERR(hash))
+ return PTR_ERR(hash);
+
+ scatterwalk_map_and_copy(hash, dst, cryptlen,
+ crypto_aead_authsize(authenc_esn), 1);
+ return 0;
+}
+
+
+static void crypto_authenc_esn_encrypt_done(struct crypto_async_request *req,
+ int err)
+{
+ struct aead_request *areq = req->data;
+
+ if (!err) {
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(areq);
+ struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+ struct ablkcipher_request *abreq = aead_request_ctx(areq);
+ u8 *iv = (u8 *)(abreq + 1) +
+ crypto_ablkcipher_reqsize(ctx->enc);
+
+ err = crypto_authenc_esn_genicv(areq, iv, 0);
+ }
+
+ authenc_esn_request_complete(areq, err);
+}
+
+static int crypto_authenc_esn_encrypt(struct aead_request *req)
+{
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+ struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+ struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct crypto_ablkcipher *enc = ctx->enc;
+ struct scatterlist *dst = req->dst;
+ unsigned int cryptlen = req->cryptlen;
+ struct ablkcipher_request *abreq = (void *)(areq_ctx->tail
+ + ctx->reqoff);
+ u8 *iv = (u8 *)abreq - crypto_ablkcipher_ivsize(enc);
+ int err;
+
+ ablkcipher_request_set_tfm(abreq, enc);
+ ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+ crypto_authenc_esn_encrypt_done, req);
+ ablkcipher_request_set_crypt(abreq, req->src, dst, cryptlen, req->iv);
+
+ memcpy(iv, req->iv, crypto_aead_ivsize(authenc_esn));
+
+ err = crypto_ablkcipher_encrypt(abreq);
+ if (err)
+ return err;
+
+ return crypto_authenc_esn_genicv(req, iv, CRYPTO_TFM_REQ_MAY_SLEEP);
+}
+
+static void crypto_authenc_esn_givencrypt_done(struct crypto_async_request *req,
+ int err)
+{
+ struct aead_request *areq = req->data;
+
+ if (!err) {
+ struct skcipher_givcrypt_request *greq = aead_request_ctx(areq);
+
+ err = crypto_authenc_esn_genicv(areq, greq->giv, 0);
+ }
+
+ authenc_esn_request_complete(areq, err);
+}
+
+static int crypto_authenc_esn_givencrypt(struct aead_givcrypt_request *req)
+{
+ struct crypto_aead *authenc_esn = aead_givcrypt_reqtfm(req);
+ struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+ struct aead_request *areq = &req->areq;
+ struct skcipher_givcrypt_request *greq = aead_request_ctx(areq);
+ u8 *iv = req->giv;
+ int err;
+
+ skcipher_givcrypt_set_tfm(greq, ctx->enc);
+ skcipher_givcrypt_set_callback(greq, aead_request_flags(areq),
+ crypto_authenc_esn_givencrypt_done, areq);
+ skcipher_givcrypt_set_crypt(greq, areq->src, areq->dst, areq->cryptlen,
+ areq->iv);
+ skcipher_givcrypt_set_giv(greq, iv, req->seq);
+
+ err = crypto_skcipher_givencrypt(greq);
+ if (err)
+ return err;
+
+ return crypto_authenc_esn_genicv(areq, iv, CRYPTO_TFM_REQ_MAY_SLEEP);
+}
+
+static int crypto_authenc_esn_verify(struct aead_request *req)
+{
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+ struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+ u8 *ohash;
+ u8 *ihash;
+ unsigned int authsize;
+
+ areq_ctx->complete = authenc_esn_verify_ahash_done;
+ areq_ctx->update_complete = authenc_esn_verify_ahash_update_done;
+
+ ohash = crypto_authenc_esn_ahash(req, CRYPTO_TFM_REQ_MAY_SLEEP);
+ if (IS_ERR(ohash))
+ return PTR_ERR(ohash);
+
+ authsize = crypto_aead_authsize(authenc_esn);
+ ihash = ohash + authsize;
+ scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
+ authsize, 0);
+ return crypto_memneq(ihash, ohash, authsize) ? -EBADMSG : 0;
+}
+
+static int crypto_authenc_esn_iverify(struct aead_request *req, u8 *iv,
+ unsigned int cryptlen)
+{
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+ struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+ struct scatterlist *src = req->src;
+ struct scatterlist *assoc = req->assoc;
+ struct scatterlist *cipher = areq_ctx->cipher;
+ struct scatterlist *hsg = areq_ctx->hsg;
+ struct scatterlist *tsg = areq_ctx->tsg;
+ struct scatterlist *assoc1;
+ struct scatterlist *assoc2;
+ unsigned int ivsize = crypto_aead_ivsize(authenc_esn);
+ struct page *srcp;
+ u8 *vsrc;
+
+ srcp = sg_page(src);
+ vsrc = PageHighMem(srcp) ? NULL : page_address(srcp) + src->offset;
+
+ if (ivsize) {
+ sg_init_table(cipher, 2);
+ sg_set_buf(cipher, iv, ivsize);
+ scatterwalk_crypto_chain(cipher, src, vsrc == iv + ivsize, 2);
+ src = cipher;
+ cryptlen += ivsize;
+ }
+
+ if (sg_is_last(assoc))
+ return -EINVAL;
+
+ assoc1 = assoc + 1;
+ if (sg_is_last(assoc1))
+ return -EINVAL;
+
+ assoc2 = assoc + 2;
+ if (!sg_is_last(assoc2))
+ return -EINVAL;
+
+ sg_init_table(hsg, 2);
+ sg_set_page(hsg, sg_page(assoc), assoc->length, assoc->offset);
+ sg_set_page(hsg + 1, sg_page(assoc2), assoc2->length, assoc2->offset);
+
+ sg_init_table(tsg, 1);
+ sg_set_page(tsg, sg_page(assoc1), assoc1->length, assoc1->offset);
+
+ areq_ctx->cryptlen = cryptlen;
+ areq_ctx->headlen = assoc->length + assoc2->length;
+ areq_ctx->trailen = assoc1->length;
+ areq_ctx->sg = src;
+
+ areq_ctx->complete = authenc_esn_verify_ahash_done;
+ areq_ctx->update_complete = authenc_esn_verify_ahash_update_done;
+ areq_ctx->update_complete2 = authenc_esn_verify_ahash_update_done2;
+
+ return crypto_authenc_esn_verify(req);
+}
+
+static int crypto_authenc_esn_decrypt(struct aead_request *req)
+{
+ struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+ struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+ struct ablkcipher_request *abreq = aead_request_ctx(req);
+ unsigned int cryptlen = req->cryptlen;
+ unsigned int authsize = crypto_aead_authsize(authenc_esn);
+ u8 *iv = req->iv;
+ int err;
+
+ if (cryptlen < authsize)
+ return -EINVAL;
+ cryptlen -= authsize;
+
+ err = crypto_authenc_esn_iverify(req, iv, cryptlen);
+ if (err)
+ return err;
+
+ ablkcipher_request_set_tfm(abreq, ctx->enc);
+ ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+ req->base.complete, req->base.data);
+ ablkcipher_request_set_crypt(abreq, req->src, req->dst, cryptlen, iv);
+
+ return crypto_ablkcipher_decrypt(abreq);
+}
+
+static int crypto_authenc_esn_init_tfm(struct crypto_tfm *tfm)
+{
+ struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
+ struct authenc_esn_instance_ctx *ictx = crypto_instance_ctx(inst);
+ struct crypto_authenc_esn_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct crypto_ahash *auth;
+ struct crypto_ablkcipher *enc;
+ int err;
+
+ auth = crypto_spawn_ahash(&ictx->auth);
+ if (IS_ERR(auth))
+ return PTR_ERR(auth);
+
+ enc = crypto_spawn_skcipher(&ictx->enc);
+ err = PTR_ERR(enc);
+ if (IS_ERR(enc))
+ goto err_free_ahash;
+
+ ctx->auth = auth;
+ ctx->enc = enc;
+
+ ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth) +
+ crypto_ahash_alignmask(auth),
+ crypto_ahash_alignmask(auth) + 1) +
+ crypto_ablkcipher_ivsize(enc);
+
+ tfm->crt_aead.reqsize = sizeof(struct authenc_esn_request_ctx) +
+ ctx->reqoff +
+ max_t(unsigned int,
+ crypto_ahash_reqsize(auth) +
+ sizeof(struct ahash_request),
+ sizeof(struct skcipher_givcrypt_request) +
+ crypto_ablkcipher_reqsize(enc));
+
+ return 0;
+
+err_free_ahash:
+ crypto_free_ahash(auth);
+ return err;
+}
+
+static void crypto_authenc_esn_exit_tfm(struct crypto_tfm *tfm)
+{
+ struct crypto_authenc_esn_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ crypto_free_ahash(ctx->auth);
+ crypto_free_ablkcipher(ctx->enc);
+}
+
+static struct crypto_instance *crypto_authenc_esn_alloc(struct rtattr **tb)
+{
+ struct crypto_attr_type *algt;
+ struct crypto_instance *inst;
+ struct hash_alg_common *auth;
+ struct crypto_alg *auth_base;
+ struct crypto_alg *enc;
+ struct authenc_esn_instance_ctx *ctx;
+ const char *enc_name;
+ int err;
+
+ algt = crypto_get_attr_type(tb);
+ if (IS_ERR(algt))
+ return ERR_CAST(algt);
+
+ if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
+ return ERR_PTR(-EINVAL);
+
+ auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
+ CRYPTO_ALG_TYPE_AHASH_MASK);
+ if (IS_ERR(auth))
+ return ERR_CAST(auth);
+
+ auth_base = &auth->base;
+
+ enc_name = crypto_attr_alg_name(tb[2]);
+ err = PTR_ERR(enc_name);
+ if (IS_ERR(enc_name))
+ goto out_put_auth;
+
+ inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
+ err = -ENOMEM;
+ if (!inst)
+ goto out_put_auth;
+
+ ctx = crypto_instance_ctx(inst);
+
+ err = crypto_init_ahash_spawn(&ctx->auth, auth, inst);
+ if (err)
+ goto err_free_inst;
+
+ crypto_set_skcipher_spawn(&ctx->enc, inst);
+ err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
+ crypto_requires_sync(algt->type,
+ algt->mask));
+ if (err)
+ goto err_drop_auth;
+
+ enc = crypto_skcipher_spawn_alg(&ctx->enc);
+
+ err = -ENAMETOOLONG;
+ if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
+ "authencesn(%s,%s)", auth_base->cra_name, enc->cra_name) >=
+ CRYPTO_MAX_ALG_NAME)
+ goto err_drop_enc;
+
+ if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+ "authencesn(%s,%s)", auth_base->cra_driver_name,
+ enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
+ goto err_drop_enc;
+
+ inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
+ inst->alg.cra_flags |= enc->cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.cra_priority = enc->cra_priority *
+ 10 + auth_base->cra_priority;
+ inst->alg.cra_blocksize = enc->cra_blocksize;
+ inst->alg.cra_alignmask = auth_base->cra_alignmask | enc->cra_alignmask;
+ inst->alg.cra_type = &crypto_aead_type;
+
+ inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize;
+ inst->alg.cra_aead.maxauthsize = auth->digestsize;
+
+ inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_esn_ctx);
+
+ inst->alg.cra_init = crypto_authenc_esn_init_tfm;
+ inst->alg.cra_exit = crypto_authenc_esn_exit_tfm;
+
+ inst->alg.cra_aead.setkey = crypto_authenc_esn_setkey;
+ inst->alg.cra_aead.encrypt = crypto_authenc_esn_encrypt;
+ inst->alg.cra_aead.decrypt = crypto_authenc_esn_decrypt;
+ inst->alg.cra_aead.givencrypt = crypto_authenc_esn_givencrypt;
+
+out:
+ crypto_mod_put(auth_base);
+ return inst;
+
+err_drop_enc:
+ crypto_drop_skcipher(&ctx->enc);
+err_drop_auth:
+ crypto_drop_ahash(&ctx->auth);
+err_free_inst:
+ kfree(inst);
+out_put_auth:
+ inst = ERR_PTR(err);
+ goto out;
+}
+
+static void crypto_authenc_esn_free(struct crypto_instance *inst)
+{
+ struct authenc_esn_instance_ctx *ctx = crypto_instance_ctx(inst);
+
+ crypto_drop_skcipher(&ctx->enc);
+ crypto_drop_ahash(&ctx->auth);
+ kfree(inst);
+}
+
+static struct crypto_template crypto_authenc_esn_tmpl = {
+ .name = "authencesn",
+ .alloc = crypto_authenc_esn_alloc,
+ .free = crypto_authenc_esn_free,
+ .module = THIS_MODULE,
+};
+
+static int __init crypto_authenc_esn_module_init(void)
+{
+ return crypto_register_template(&crypto_authenc_esn_tmpl);
+}
+
+static void __exit crypto_authenc_esn_module_exit(void)
+{
+ crypto_unregister_template(&crypto_authenc_esn_tmpl);
+}
+
+module_init(crypto_authenc_esn_module_init);
+module_exit(crypto_authenc_esn_module_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
+MODULE_DESCRIPTION("AEAD wrapper for IPsec with extended sequence numbers");
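
The new "authencesn" template is instantiated by name like "authenc", and crypto_authenc_esn_setkey() above reuses crypto_authenc_extractkeys(), so the key blob carries an rtattr header giving the encryption-key length, followed by the authentication key and then the encryption key. A hedged caller sketch under that assumption follows; the function name, the hmac(sha1)/cbc(aes) pairing and the key sizes are illustrative and not taken from this patch, and the headers assumed are <crypto/aead.h>, <crypto/authenc.h>, <linux/rtnetlink.h>, <linux/slab.h> and <linux/err.h>:

	/* Sketch: allocate an authencesn AEAD and feed it an authenc-format key. */
	static struct crypto_aead *authenc_esn_tfm_sketch(const u8 *auth_key,
							  unsigned int auth_keylen,
							  const u8 *enc_key,
							  unsigned int enc_keylen)
	{
		struct crypto_authenc_key_param *param;
		struct crypto_aead *tfm;
		struct rtattr *rta;
		unsigned int keylen;
		u8 *key;
		int err;

		tfm = crypto_alloc_aead("authencesn(hmac(sha1),cbc(aes))", 0, 0);
		if (IS_ERR(tfm))
			return tfm;

		/* rtattr header | authentication key | encryption key,
		 * as parsed by crypto_authenc_extractkeys(). */
		err = -ENOMEM;
		keylen = RTA_SPACE(sizeof(*param)) + auth_keylen + enc_keylen;
		key = kmalloc(keylen, GFP_KERNEL);
		if (!key)
			goto err_free_tfm;

		rta = (struct rtattr *)key;
		rta->rta_type = CRYPTO_AUTHENC_KEYA_PARAM;
		rta->rta_len = RTA_LENGTH(sizeof(*param));
		param = RTA_DATA(rta);
		param->enckeylen = cpu_to_be32(enc_keylen);

		memcpy(key + RTA_SPACE(sizeof(*param)), auth_key, auth_keylen);
		memcpy(key + RTA_SPACE(sizeof(*param)) + auth_keylen,
		       enc_key, enc_keylen);

		err = crypto_aead_setkey(tfm, key, keylen);
		kfree(key);
		if (err)
			goto err_free_tfm;

		return tfm;

	err_free_tfm:
		crypto_free_aead(tfm);
		return ERR_PTR(err);
	}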
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
index d70a41c002d..0122bec3856 100644
--- a/crypto/blkcipher.c
+++ b/crypto/blkcipher.c
@@ -24,6 +24,8 @@
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
#include "internal.h"
@@ -41,22 +43,22 @@ static int blkcipher_walk_first(struct blkcipher_desc *desc,
static inline void blkcipher_map_src(struct blkcipher_walk *walk)
{
- walk->src.virt.addr = scatterwalk_map(&walk->in, 0);
+ walk->src.virt.addr = scatterwalk_map(&walk->in);
}
static inline void blkcipher_map_dst(struct blkcipher_walk *walk)
{
- walk->dst.virt.addr = scatterwalk_map(&walk->out, 1);
+ walk->dst.virt.addr = scatterwalk_map(&walk->out);
}
static inline void blkcipher_unmap_src(struct blkcipher_walk *walk)
{
- scatterwalk_unmap(walk->src.virt.addr, 0);
+ scatterwalk_unmap(walk->src.virt.addr);
}
static inline void blkcipher_unmap_dst(struct blkcipher_walk *walk)
{
- scatterwalk_unmap(walk->dst.virt.addr, 1);
+ scatterwalk_unmap(walk->dst.virt.addr);
}
/* Get a spot of the specified length that does not straddle a page.
@@ -68,14 +70,12 @@ static inline u8 *blkcipher_get_spot(u8 *start, unsigned int len)
return max(start, end_page);
}
-static inline unsigned int blkcipher_done_slow(struct crypto_blkcipher *tfm,
- struct blkcipher_walk *walk,
+static inline unsigned int blkcipher_done_slow(struct blkcipher_walk *walk,
unsigned int bsize)
{
u8 *addr;
- unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
- addr = (u8 *)ALIGN((unsigned long)walk->buffer, alignmask + 1);
+ addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
addr = blkcipher_get_spot(addr, bsize);
scatterwalk_copychunks(addr, &walk->out, bsize, 1);
return bsize;
@@ -89,9 +89,9 @@ static inline unsigned int blkcipher_done_fast(struct blkcipher_walk *walk,
memcpy(walk->dst.virt.addr, walk->page, n);
blkcipher_unmap_dst(walk);
} else if (!(walk->flags & BLKCIPHER_WALK_PHYS)) {
- blkcipher_unmap_src(walk);
if (walk->flags & BLKCIPHER_WALK_DIFF)
blkcipher_unmap_dst(walk);
+ blkcipher_unmap_src(walk);
}
scatterwalk_advance(&walk->in, n);
@@ -103,7 +103,6 @@ static inline unsigned int blkcipher_done_fast(struct blkcipher_walk *walk,
int blkcipher_walk_done(struct blkcipher_desc *desc,
struct blkcipher_walk *walk, int err)
{
- struct crypto_blkcipher *tfm = desc->tfm;
unsigned int nbytes = 0;
if (likely(err >= 0)) {
@@ -115,7 +114,7 @@ int blkcipher_walk_done(struct blkcipher_desc *desc,
err = -EINVAL;
goto err;
} else
- n = blkcipher_done_slow(tfm, walk, n);
+ n = blkcipher_done_slow(walk, n);
nbytes = walk->total - n;
err = 0;
@@ -134,7 +133,7 @@ err:
}
if (walk->iv != desc->info)
- memcpy(desc->info, walk->iv, crypto_blkcipher_ivsize(tfm));
+ memcpy(desc->info, walk->iv, walk->ivsize);
if (walk->buffer != walk->page)
kfree(walk->buffer);
if (walk->page)
@@ -224,22 +223,20 @@ static inline int blkcipher_next_fast(struct blkcipher_desc *desc,
static int blkcipher_walk_next(struct blkcipher_desc *desc,
struct blkcipher_walk *walk)
{
- struct crypto_blkcipher *tfm = desc->tfm;
- unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
unsigned int bsize;
unsigned int n;
int err;
n = walk->total;
- if (unlikely(n < crypto_blkcipher_blocksize(tfm))) {
+ if (unlikely(n < walk->cipher_blocksize)) {
desc->flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
return blkcipher_walk_done(desc, walk, -EINVAL);
}
walk->flags &= ~(BLKCIPHER_WALK_SLOW | BLKCIPHER_WALK_COPY |
BLKCIPHER_WALK_DIFF);
- if (!scatterwalk_aligned(&walk->in, alignmask) ||
- !scatterwalk_aligned(&walk->out, alignmask)) {
+ if (!scatterwalk_aligned(&walk->in, walk->alignmask) ||
+ !scatterwalk_aligned(&walk->out, walk->alignmask)) {
walk->flags |= BLKCIPHER_WALK_COPY;
if (!walk->page) {
walk->page = (void *)__get_free_page(GFP_ATOMIC);
@@ -248,12 +245,12 @@ static int blkcipher_walk_next(struct blkcipher_desc *desc,
}
}
- bsize = min(walk->blocksize, n);
+ bsize = min(walk->walk_blocksize, n);
n = scatterwalk_clamp(&walk->in, n);
n = scatterwalk_clamp(&walk->out, n);
if (unlikely(n < bsize)) {
- err = blkcipher_next_slow(desc, walk, bsize, alignmask);
+ err = blkcipher_next_slow(desc, walk, bsize, walk->alignmask);
goto set_phys_lowmem;
}
@@ -275,28 +272,26 @@ set_phys_lowmem:
return err;
}
-static inline int blkcipher_copy_iv(struct blkcipher_walk *walk,
- struct crypto_blkcipher *tfm,
- unsigned int alignmask)
+static inline int blkcipher_copy_iv(struct blkcipher_walk *walk)
{
- unsigned bs = walk->blocksize;
- unsigned int ivsize = crypto_blkcipher_ivsize(tfm);
- unsigned aligned_bs = ALIGN(bs, alignmask + 1);
- unsigned int size = aligned_bs * 2 + ivsize + max(aligned_bs, ivsize) -
- (alignmask + 1);
+ unsigned bs = walk->walk_blocksize;
+ unsigned aligned_bs = ALIGN(bs, walk->alignmask + 1);
+ unsigned int size = aligned_bs * 2 +
+ walk->ivsize + max(aligned_bs, walk->ivsize) -
+ (walk->alignmask + 1);
u8 *iv;
- size += alignmask & ~(crypto_tfm_ctx_alignment() - 1);
+ size += walk->alignmask & ~(crypto_tfm_ctx_alignment() - 1);
walk->buffer = kmalloc(size, GFP_ATOMIC);
if (!walk->buffer)
return -ENOMEM;
- iv = (u8 *)ALIGN((unsigned long)walk->buffer, alignmask + 1);
+ iv = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
iv = blkcipher_get_spot(iv, bs) + aligned_bs;
iv = blkcipher_get_spot(iv, bs) + aligned_bs;
- iv = blkcipher_get_spot(iv, ivsize);
+ iv = blkcipher_get_spot(iv, walk->ivsize);
- walk->iv = memcpy(iv, walk->iv, ivsize);
+ walk->iv = memcpy(iv, walk->iv, walk->ivsize);
return 0;
}
@@ -304,7 +299,10 @@ int blkcipher_walk_virt(struct blkcipher_desc *desc,
struct blkcipher_walk *walk)
{
walk->flags &= ~BLKCIPHER_WALK_PHYS;
- walk->blocksize = crypto_blkcipher_blocksize(desc->tfm);
+ walk->walk_blocksize = crypto_blkcipher_blocksize(desc->tfm);
+ walk->cipher_blocksize = walk->walk_blocksize;
+ walk->ivsize = crypto_blkcipher_ivsize(desc->tfm);
+ walk->alignmask = crypto_blkcipher_alignmask(desc->tfm);
return blkcipher_walk_first(desc, walk);
}
EXPORT_SYMBOL_GPL(blkcipher_walk_virt);
@@ -313,7 +311,10 @@ int blkcipher_walk_phys(struct blkcipher_desc *desc,
struct blkcipher_walk *walk)
{
walk->flags |= BLKCIPHER_WALK_PHYS;
- walk->blocksize = crypto_blkcipher_blocksize(desc->tfm);
+ walk->walk_blocksize = crypto_blkcipher_blocksize(desc->tfm);
+ walk->cipher_blocksize = walk->walk_blocksize;
+ walk->ivsize = crypto_blkcipher_ivsize(desc->tfm);
+ walk->alignmask = crypto_blkcipher_alignmask(desc->tfm);
return blkcipher_walk_first(desc, walk);
}
EXPORT_SYMBOL_GPL(blkcipher_walk_phys);
@@ -321,9 +322,6 @@ EXPORT_SYMBOL_GPL(blkcipher_walk_phys);
static int blkcipher_walk_first(struct blkcipher_desc *desc,
struct blkcipher_walk *walk)
{
- struct crypto_blkcipher *tfm = desc->tfm;
- unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
-
if (WARN_ON_ONCE(in_irq()))
return -EDEADLK;
@@ -333,8 +331,8 @@ static int blkcipher_walk_first(struct blkcipher_desc *desc,
walk->buffer = NULL;
walk->iv = desc->info;
- if (unlikely(((unsigned long)walk->iv & alignmask))) {
- int err = blkcipher_copy_iv(walk, tfm, alignmask);
+ if (unlikely(((unsigned long)walk->iv & walk->alignmask))) {
+ int err = blkcipher_copy_iv(walk);
if (err)
return err;
}
@@ -351,11 +349,28 @@ int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
unsigned int blocksize)
{
walk->flags &= ~BLKCIPHER_WALK_PHYS;
- walk->blocksize = blocksize;
+ walk->walk_blocksize = blocksize;
+ walk->cipher_blocksize = crypto_blkcipher_blocksize(desc->tfm);
+ walk->ivsize = crypto_blkcipher_ivsize(desc->tfm);
+ walk->alignmask = crypto_blkcipher_alignmask(desc->tfm);
return blkcipher_walk_first(desc, walk);
}
EXPORT_SYMBOL_GPL(blkcipher_walk_virt_block);
+int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
+ struct blkcipher_walk *walk,
+ struct crypto_aead *tfm,
+ unsigned int blocksize)
+{
+ walk->flags &= ~BLKCIPHER_WALK_PHYS;
+ walk->walk_blocksize = blocksize;
+ walk->cipher_blocksize = crypto_aead_blocksize(tfm);
+ walk->ivsize = crypto_aead_ivsize(tfm);
+ walk->alignmask = crypto_aead_alignmask(tfm);
+ return blkcipher_walk_first(desc, walk);
+}
+EXPORT_SYMBOL_GPL(blkcipher_aead_walk_virt_block);
+
static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key,
unsigned int keylen)
{
@@ -492,6 +507,35 @@ static int crypto_init_blkcipher_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
return crypto_init_blkcipher_ops_async(tfm);
}
+#ifdef CONFIG_NET
+static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_blkcipher rblkcipher;
+
+ strncpy(rblkcipher.type, "blkcipher", sizeof(rblkcipher.type));
+ strncpy(rblkcipher.geniv, alg->cra_blkcipher.geniv ?: "<default>",
+ sizeof(rblkcipher.geniv));
+
+ rblkcipher.blocksize = alg->cra_blocksize;
+ rblkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
+ rblkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
+ rblkcipher.ivsize = alg->cra_blkcipher.ivsize;
+
+ if (nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
+ sizeof(struct crypto_report_blkcipher), &rblkcipher))
+ goto nla_put_failure;
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
@@ -511,6 +555,7 @@ const struct crypto_type crypto_blkcipher_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_blkcipher_show,
#endif
+ .report = crypto_blkcipher_report,
};
EXPORT_SYMBOL_GPL(crypto_blkcipher_type);
@@ -521,7 +566,7 @@ static int crypto_grab_nivcipher(struct crypto_skcipher_spawn *spawn,
int err;
type = crypto_skcipher_type(type);
- mask = crypto_skcipher_mask(mask) | CRYPTO_ALG_GENIV;
+ mask = crypto_skcipher_mask(mask)| CRYPTO_ALG_GENIV;
alg = crypto_alg_mod_lookup(name, type, mask);
if (IS_ERR(alg))
@@ -556,18 +601,16 @@ struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
int err;
algt = crypto_get_attr_type(tb);
- err = PTR_ERR(algt);
if (IS_ERR(algt))
- return ERR_PTR(err);
+ return ERR_CAST(algt);
if ((algt->type ^ (CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV)) &
algt->mask)
return ERR_PTR(-EINVAL);
name = crypto_attr_alg_name(tb[1]);
- err = PTR_ERR(name);
if (IS_ERR(name))
- return ERR_PTR(err);
+ return ERR_CAST(name);
inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
if (!inst)
diff --git a/crypto/blowfish.c b/crypto/blowfish_common.c
index 6f5b4873192..f636aab0209 100644
--- a/crypto/blowfish.c
+++ b/crypto/blowfish_common.c
@@ -1,6 +1,9 @@
-/*
+/*
* Cryptographic API.
*
+ * Common Blowfish algorithm parts shared between the c and assembler
+ * implementations.
+ *
* Blowfish Cipher Algorithm, by Bruce Schneier.
* http://www.counterpane.com/blowfish.html
*
@@ -22,15 +25,7 @@
#include <asm/byteorder.h>
#include <linux/crypto.h>
#include <linux/types.h>
-
-#define BF_BLOCK_SIZE 8
-#define BF_MIN_KEY_SIZE 4
-#define BF_MAX_KEY_SIZE 56
-
-struct bf_ctx {
- u32 p[18];
- u32 s[1024];
-};
+#include <crypto/blowfish.h>
static const u32 bf_pbox[16 + 2] = {
0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344,
@@ -299,7 +294,7 @@ static const u32 bf_sbox[256 * 4] = {
0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6,
};
-/*
+/*
* Round loop unrolling macros, S is a pointer to a S-Box array
* organized in 4 unsigned longs at a row.
*/
@@ -309,13 +304,13 @@ static const u32 bf_sbox[256 * 4] = {
#define GET32_0(x) (((x) >> (24)) & (0xff))
#define bf_F(x) (((S[GET32_0(x)] + S[256 + GET32_1(x)]) ^ \
- S[512 + GET32_2(x)]) + S[768 + GET32_3(x)])
+ S[512 + GET32_2(x)]) + S[768 + GET32_3(x)])
-#define ROUND(a, b, n) b ^= P[n]; a ^= bf_F (b)
+#define ROUND(a, b, n) ({ b ^= P[n]; a ^= bf_F(b); })
/*
* The blowfish encipher, processes 64-bit blocks.
- * NOTE: This function MUSTN'T respect endianess
+ * NOTE: This function MUSTN'T respect endianess
*/
static void encrypt_block(struct bf_ctx *bctx, u32 *dst, u32 *src)
{
@@ -348,57 +343,10 @@ static void encrypt_block(struct bf_ctx *bctx, u32 *dst, u32 *src)
dst[1] = yl;
}
-static void bf_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- const __be32 *in_blk = (const __be32 *)src;
- __be32 *const out_blk = (__be32 *)dst;
- u32 in32[2], out32[2];
-
- in32[0] = be32_to_cpu(in_blk[0]);
- in32[1] = be32_to_cpu(in_blk[1]);
- encrypt_block(crypto_tfm_ctx(tfm), out32, in32);
- out_blk[0] = cpu_to_be32(out32[0]);
- out_blk[1] = cpu_to_be32(out32[1]);
-}
-
-static void bf_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
- const __be32 *in_blk = (const __be32 *)src;
- __be32 *const out_blk = (__be32 *)dst;
- const u32 *P = ctx->p;
- const u32 *S = ctx->s;
- u32 yl = be32_to_cpu(in_blk[0]);
- u32 yr = be32_to_cpu(in_blk[1]);
-
- ROUND(yr, yl, 17);
- ROUND(yl, yr, 16);
- ROUND(yr, yl, 15);
- ROUND(yl, yr, 14);
- ROUND(yr, yl, 13);
- ROUND(yl, yr, 12);
- ROUND(yr, yl, 11);
- ROUND(yl, yr, 10);
- ROUND(yr, yl, 9);
- ROUND(yl, yr, 8);
- ROUND(yr, yl, 7);
- ROUND(yl, yr, 6);
- ROUND(yr, yl, 5);
- ROUND(yl, yr, 4);
- ROUND(yr, yl, 3);
- ROUND(yl, yr, 2);
-
- yl ^= P[1];
- yr ^= P[0];
-
- out_blk[0] = cpu_to_be32(yr);
- out_blk[1] = cpu_to_be32(yl);
-}
-
-/*
+/*
* Calculates the blowfish S and P boxes for encryption and decryption.
*/
-static int bf_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
+int blowfish_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
u32 *P = ctx->p;
@@ -417,10 +365,10 @@ static int bf_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
/* Actual subkey generation */
for (j = 0, i = 0; i < 16 + 2; i++) {
- temp = (((u32 )key[j] << 24) |
- ((u32 )key[(j + 1) % keylen] << 16) |
- ((u32 )key[(j + 2) % keylen] << 8) |
- ((u32 )key[(j + 3) % keylen]));
+ temp = (((u32)key[j] << 24) |
+ ((u32)key[(j + 1) % keylen] << 16) |
+ ((u32)key[(j + 2) % keylen] << 8) |
+ ((u32)key[(j + 3) % keylen]));
P[i] = P[i] ^ temp;
j = (j + 4) % keylen;
@@ -444,39 +392,11 @@ static int bf_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
S[count + 1] = data[1];
}
}
-
+
/* Bruce says not to bother with the weak key check. */
return 0;
}
-
-static struct crypto_alg alg = {
- .cra_name = "blowfish",
- .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
- .cra_blocksize = BF_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct bf_ctx),
- .cra_alignmask = 3,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(alg.cra_list),
- .cra_u = { .cipher = {
- .cia_min_keysize = BF_MIN_KEY_SIZE,
- .cia_max_keysize = BF_MAX_KEY_SIZE,
- .cia_setkey = bf_setkey,
- .cia_encrypt = bf_encrypt,
- .cia_decrypt = bf_decrypt } }
-};
-
-static int __init blowfish_mod_init(void)
-{
- return crypto_register_alg(&alg);
-}
-
-static void __exit blowfish_mod_fini(void)
-{
- crypto_unregister_alg(&alg);
-}
-
-module_init(blowfish_mod_init);
-module_exit(blowfish_mod_fini);
+EXPORT_SYMBOL_GPL(blowfish_setkey);
MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Blowfish Cipher Algorithm");
+MODULE_DESCRIPTION("Blowfish Cipher common functions");
diff --git a/crypto/blowfish_generic.c b/crypto/blowfish_generic.c
new file mode 100644
index 00000000000..8baf5447d35
--- /dev/null
+++ b/crypto/blowfish_generic.c
@@ -0,0 +1,141 @@
+/*
+ * Cryptographic API.
+ *
+ * Blowfish Cipher Algorithm, by Bruce Schneier.
+ * http://www.counterpane.com/blowfish.html
+ *
+ * Adapted from Kerneli implementation.
+ *
+ * Copyright (c) Herbert Valerio Riedel <hvr@hvrlab.org>
+ * Copyright (c) Kyle McMartin <kyle@debian.org>
+ * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ */
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/mm.h>
+#include <asm/byteorder.h>
+#include <linux/crypto.h>
+#include <linux/types.h>
+#include <crypto/blowfish.h>
+
+/*
+ * Round loop unrolling macros, S is a pointer to a S-Box array
+ * organized in 4 unsigned longs at a row.
+ */
+#define GET32_3(x) (((x) & 0xff))
+#define GET32_2(x) (((x) >> (8)) & (0xff))
+#define GET32_1(x) (((x) >> (16)) & (0xff))
+#define GET32_0(x) (((x) >> (24)) & (0xff))
+
+#define bf_F(x) (((S[GET32_0(x)] + S[256 + GET32_1(x)]) ^ \
+ S[512 + GET32_2(x)]) + S[768 + GET32_3(x)])
+
+#define ROUND(a, b, n) ({ b ^= P[n]; a ^= bf_F(b); })
+
+static void bf_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+ struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
+ const __be32 *in_blk = (const __be32 *)src;
+ __be32 *const out_blk = (__be32 *)dst;
+ const u32 *P = ctx->p;
+ const u32 *S = ctx->s;
+ u32 yl = be32_to_cpu(in_blk[0]);
+ u32 yr = be32_to_cpu(in_blk[1]);
+
+ ROUND(yr, yl, 0);
+ ROUND(yl, yr, 1);
+ ROUND(yr, yl, 2);
+ ROUND(yl, yr, 3);
+ ROUND(yr, yl, 4);
+ ROUND(yl, yr, 5);
+ ROUND(yr, yl, 6);
+ ROUND(yl, yr, 7);
+ ROUND(yr, yl, 8);
+ ROUND(yl, yr, 9);
+ ROUND(yr, yl, 10);
+ ROUND(yl, yr, 11);
+ ROUND(yr, yl, 12);
+ ROUND(yl, yr, 13);
+ ROUND(yr, yl, 14);
+ ROUND(yl, yr, 15);
+
+ yl ^= P[16];
+ yr ^= P[17];
+
+ out_blk[0] = cpu_to_be32(yr);
+ out_blk[1] = cpu_to_be32(yl);
+}
+
+static void bf_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+ struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
+ const __be32 *in_blk = (const __be32 *)src;
+ __be32 *const out_blk = (__be32 *)dst;
+ const u32 *P = ctx->p;
+ const u32 *S = ctx->s;
+ u32 yl = be32_to_cpu(in_blk[0]);
+ u32 yr = be32_to_cpu(in_blk[1]);
+
+ ROUND(yr, yl, 17);
+ ROUND(yl, yr, 16);
+ ROUND(yr, yl, 15);
+ ROUND(yl, yr, 14);
+ ROUND(yr, yl, 13);
+ ROUND(yl, yr, 12);
+ ROUND(yr, yl, 11);
+ ROUND(yl, yr, 10);
+ ROUND(yr, yl, 9);
+ ROUND(yl, yr, 8);
+ ROUND(yr, yl, 7);
+ ROUND(yl, yr, 6);
+ ROUND(yr, yl, 5);
+ ROUND(yl, yr, 4);
+ ROUND(yr, yl, 3);
+ ROUND(yl, yr, 2);
+
+ yl ^= P[1];
+ yr ^= P[0];
+
+ out_blk[0] = cpu_to_be32(yr);
+ out_blk[1] = cpu_to_be32(yl);
+}
+
+static struct crypto_alg alg = {
+ .cra_name = "blowfish",
+ .cra_driver_name = "blowfish-generic",
+ .cra_priority = 100,
+ .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
+ .cra_blocksize = BF_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct bf_ctx),
+ .cra_alignmask = 3,
+ .cra_module = THIS_MODULE,
+ .cra_u = { .cipher = {
+ .cia_min_keysize = BF_MIN_KEY_SIZE,
+ .cia_max_keysize = BF_MAX_KEY_SIZE,
+ .cia_setkey = blowfish_setkey,
+ .cia_encrypt = bf_encrypt,
+ .cia_decrypt = bf_decrypt } }
+};
+
+static int __init blowfish_mod_init(void)
+{
+ return crypto_register_alg(&alg);
+}
+
+static void __exit blowfish_mod_fini(void)
+{
+ crypto_unregister_alg(&alg);
+}
+
+module_init(blowfish_mod_init);
+module_exit(blowfish_mod_fini);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Blowfish Cipher Algorithm");
+MODULE_ALIAS("blowfish");
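
From a consumer's point of view the blowfish split changes nothing: "blowfish-generic" registers at priority 100 so an architecture-specific driver reusing the exported blowfish_setkey() can win the priority-based selection, while callers keep requesting the cipher by its generic name. A minimal usage sketch, assuming <linux/crypto.h>, <linux/err.h> and <crypto/blowfish.h>; the helper name bf_one_block_sketch is illustrative and not part of this patch:

	/* Sketch: encrypt one 8-byte block with whichever "blowfish"
	 * implementation the crypto API selects by priority. */
	static int bf_one_block_sketch(const u8 *key, unsigned int keylen,
				       const u8 src[BF_BLOCK_SIZE],
				       u8 dst[BF_BLOCK_SIZE])
	{
		struct crypto_cipher *tfm;
		int err;

		tfm = crypto_alloc_cipher("blowfish", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		err = crypto_cipher_setkey(tfm, key, keylen);
		if (!err)
			crypto_cipher_encrypt_one(tfm, dst, src);

		crypto_free_cipher(tfm);
		return err;
	}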
diff --git a/crypto/camellia.c b/crypto/camellia_generic.c
index 964635d163f..26bcd7a2d6b 100644
--- a/crypto/camellia.c
+++ b/crypto/camellia_generic.c
@@ -39,271 +39,271 @@
#include <asm/unaligned.h>
static const u32 camellia_sp1110[256] = {
- 0x70707000,0x82828200,0x2c2c2c00,0xececec00,
- 0xb3b3b300,0x27272700,0xc0c0c000,0xe5e5e500,
- 0xe4e4e400,0x85858500,0x57575700,0x35353500,
- 0xeaeaea00,0x0c0c0c00,0xaeaeae00,0x41414100,
- 0x23232300,0xefefef00,0x6b6b6b00,0x93939300,
- 0x45454500,0x19191900,0xa5a5a500,0x21212100,
- 0xededed00,0x0e0e0e00,0x4f4f4f00,0x4e4e4e00,
- 0x1d1d1d00,0x65656500,0x92929200,0xbdbdbd00,
- 0x86868600,0xb8b8b800,0xafafaf00,0x8f8f8f00,
- 0x7c7c7c00,0xebebeb00,0x1f1f1f00,0xcecece00,
- 0x3e3e3e00,0x30303000,0xdcdcdc00,0x5f5f5f00,
- 0x5e5e5e00,0xc5c5c500,0x0b0b0b00,0x1a1a1a00,
- 0xa6a6a600,0xe1e1e100,0x39393900,0xcacaca00,
- 0xd5d5d500,0x47474700,0x5d5d5d00,0x3d3d3d00,
- 0xd9d9d900,0x01010100,0x5a5a5a00,0xd6d6d600,
- 0x51515100,0x56565600,0x6c6c6c00,0x4d4d4d00,
- 0x8b8b8b00,0x0d0d0d00,0x9a9a9a00,0x66666600,
- 0xfbfbfb00,0xcccccc00,0xb0b0b000,0x2d2d2d00,
- 0x74747400,0x12121200,0x2b2b2b00,0x20202000,
- 0xf0f0f000,0xb1b1b100,0x84848400,0x99999900,
- 0xdfdfdf00,0x4c4c4c00,0xcbcbcb00,0xc2c2c200,
- 0x34343400,0x7e7e7e00,0x76767600,0x05050500,
- 0x6d6d6d00,0xb7b7b700,0xa9a9a900,0x31313100,
- 0xd1d1d100,0x17171700,0x04040400,0xd7d7d700,
- 0x14141400,0x58585800,0x3a3a3a00,0x61616100,
- 0xdedede00,0x1b1b1b00,0x11111100,0x1c1c1c00,
- 0x32323200,0x0f0f0f00,0x9c9c9c00,0x16161600,
- 0x53535300,0x18181800,0xf2f2f200,0x22222200,
- 0xfefefe00,0x44444400,0xcfcfcf00,0xb2b2b200,
- 0xc3c3c300,0xb5b5b500,0x7a7a7a00,0x91919100,
- 0x24242400,0x08080800,0xe8e8e800,0xa8a8a800,
- 0x60606000,0xfcfcfc00,0x69696900,0x50505000,
- 0xaaaaaa00,0xd0d0d000,0xa0a0a000,0x7d7d7d00,
- 0xa1a1a100,0x89898900,0x62626200,0x97979700,
- 0x54545400,0x5b5b5b00,0x1e1e1e00,0x95959500,
- 0xe0e0e000,0xffffff00,0x64646400,0xd2d2d200,
- 0x10101000,0xc4c4c400,0x00000000,0x48484800,
- 0xa3a3a300,0xf7f7f700,0x75757500,0xdbdbdb00,
- 0x8a8a8a00,0x03030300,0xe6e6e600,0xdadada00,
- 0x09090900,0x3f3f3f00,0xdddddd00,0x94949400,
- 0x87878700,0x5c5c5c00,0x83838300,0x02020200,
- 0xcdcdcd00,0x4a4a4a00,0x90909000,0x33333300,
- 0x73737300,0x67676700,0xf6f6f600,0xf3f3f300,
- 0x9d9d9d00,0x7f7f7f00,0xbfbfbf00,0xe2e2e200,
- 0x52525200,0x9b9b9b00,0xd8d8d800,0x26262600,
- 0xc8c8c800,0x37373700,0xc6c6c600,0x3b3b3b00,
- 0x81818100,0x96969600,0x6f6f6f00,0x4b4b4b00,
- 0x13131300,0xbebebe00,0x63636300,0x2e2e2e00,
- 0xe9e9e900,0x79797900,0xa7a7a700,0x8c8c8c00,
- 0x9f9f9f00,0x6e6e6e00,0xbcbcbc00,0x8e8e8e00,
- 0x29292900,0xf5f5f500,0xf9f9f900,0xb6b6b600,
- 0x2f2f2f00,0xfdfdfd00,0xb4b4b400,0x59595900,
- 0x78787800,0x98989800,0x06060600,0x6a6a6a00,
- 0xe7e7e700,0x46464600,0x71717100,0xbababa00,
- 0xd4d4d400,0x25252500,0xababab00,0x42424200,
- 0x88888800,0xa2a2a200,0x8d8d8d00,0xfafafa00,
- 0x72727200,0x07070700,0xb9b9b900,0x55555500,
- 0xf8f8f800,0xeeeeee00,0xacacac00,0x0a0a0a00,
- 0x36363600,0x49494900,0x2a2a2a00,0x68686800,
- 0x3c3c3c00,0x38383800,0xf1f1f100,0xa4a4a400,
- 0x40404000,0x28282800,0xd3d3d300,0x7b7b7b00,
- 0xbbbbbb00,0xc9c9c900,0x43434300,0xc1c1c100,
- 0x15151500,0xe3e3e300,0xadadad00,0xf4f4f400,
- 0x77777700,0xc7c7c700,0x80808000,0x9e9e9e00,
+ 0x70707000, 0x82828200, 0x2c2c2c00, 0xececec00,
+ 0xb3b3b300, 0x27272700, 0xc0c0c000, 0xe5e5e500,
+ 0xe4e4e400, 0x85858500, 0x57575700, 0x35353500,
+ 0xeaeaea00, 0x0c0c0c00, 0xaeaeae00, 0x41414100,
+ 0x23232300, 0xefefef00, 0x6b6b6b00, 0x93939300,
+ 0x45454500, 0x19191900, 0xa5a5a500, 0x21212100,
+ 0xededed00, 0x0e0e0e00, 0x4f4f4f00, 0x4e4e4e00,
+ 0x1d1d1d00, 0x65656500, 0x92929200, 0xbdbdbd00,
+ 0x86868600, 0xb8b8b800, 0xafafaf00, 0x8f8f8f00,
+ 0x7c7c7c00, 0xebebeb00, 0x1f1f1f00, 0xcecece00,
+ 0x3e3e3e00, 0x30303000, 0xdcdcdc00, 0x5f5f5f00,
+ 0x5e5e5e00, 0xc5c5c500, 0x0b0b0b00, 0x1a1a1a00,
+ 0xa6a6a600, 0xe1e1e100, 0x39393900, 0xcacaca00,
+ 0xd5d5d500, 0x47474700, 0x5d5d5d00, 0x3d3d3d00,
+ 0xd9d9d900, 0x01010100, 0x5a5a5a00, 0xd6d6d600,
+ 0x51515100, 0x56565600, 0x6c6c6c00, 0x4d4d4d00,
+ 0x8b8b8b00, 0x0d0d0d00, 0x9a9a9a00, 0x66666600,
+ 0xfbfbfb00, 0xcccccc00, 0xb0b0b000, 0x2d2d2d00,
+ 0x74747400, 0x12121200, 0x2b2b2b00, 0x20202000,
+ 0xf0f0f000, 0xb1b1b100, 0x84848400, 0x99999900,
+ 0xdfdfdf00, 0x4c4c4c00, 0xcbcbcb00, 0xc2c2c200,
+ 0x34343400, 0x7e7e7e00, 0x76767600, 0x05050500,
+ 0x6d6d6d00, 0xb7b7b700, 0xa9a9a900, 0x31313100,
+ 0xd1d1d100, 0x17171700, 0x04040400, 0xd7d7d700,
+ 0x14141400, 0x58585800, 0x3a3a3a00, 0x61616100,
+ 0xdedede00, 0x1b1b1b00, 0x11111100, 0x1c1c1c00,
+ 0x32323200, 0x0f0f0f00, 0x9c9c9c00, 0x16161600,
+ 0x53535300, 0x18181800, 0xf2f2f200, 0x22222200,
+ 0xfefefe00, 0x44444400, 0xcfcfcf00, 0xb2b2b200,
+ 0xc3c3c300, 0xb5b5b500, 0x7a7a7a00, 0x91919100,
+ 0x24242400, 0x08080800, 0xe8e8e800, 0xa8a8a800,
+ 0x60606000, 0xfcfcfc00, 0x69696900, 0x50505000,
+ 0xaaaaaa00, 0xd0d0d000, 0xa0a0a000, 0x7d7d7d00,
+ 0xa1a1a100, 0x89898900, 0x62626200, 0x97979700,
+ 0x54545400, 0x5b5b5b00, 0x1e1e1e00, 0x95959500,
+ 0xe0e0e000, 0xffffff00, 0x64646400, 0xd2d2d200,
+ 0x10101000, 0xc4c4c400, 0x00000000, 0x48484800,
+ 0xa3a3a300, 0xf7f7f700, 0x75757500, 0xdbdbdb00,
+ 0x8a8a8a00, 0x03030300, 0xe6e6e600, 0xdadada00,
+ 0x09090900, 0x3f3f3f00, 0xdddddd00, 0x94949400,
+ 0x87878700, 0x5c5c5c00, 0x83838300, 0x02020200,
+ 0xcdcdcd00, 0x4a4a4a00, 0x90909000, 0x33333300,
+ 0x73737300, 0x67676700, 0xf6f6f600, 0xf3f3f300,
+ 0x9d9d9d00, 0x7f7f7f00, 0xbfbfbf00, 0xe2e2e200,
+ 0x52525200, 0x9b9b9b00, 0xd8d8d800, 0x26262600,
+ 0xc8c8c800, 0x37373700, 0xc6c6c600, 0x3b3b3b00,
+ 0x81818100, 0x96969600, 0x6f6f6f00, 0x4b4b4b00,
+ 0x13131300, 0xbebebe00, 0x63636300, 0x2e2e2e00,
+ 0xe9e9e900, 0x79797900, 0xa7a7a700, 0x8c8c8c00,
+ 0x9f9f9f00, 0x6e6e6e00, 0xbcbcbc00, 0x8e8e8e00,
+ 0x29292900, 0xf5f5f500, 0xf9f9f900, 0xb6b6b600,
+ 0x2f2f2f00, 0xfdfdfd00, 0xb4b4b400, 0x59595900,
+ 0x78787800, 0x98989800, 0x06060600, 0x6a6a6a00,
+ 0xe7e7e700, 0x46464600, 0x71717100, 0xbababa00,
+ 0xd4d4d400, 0x25252500, 0xababab00, 0x42424200,
+ 0x88888800, 0xa2a2a200, 0x8d8d8d00, 0xfafafa00,
+ 0x72727200, 0x07070700, 0xb9b9b900, 0x55555500,
+ 0xf8f8f800, 0xeeeeee00, 0xacacac00, 0x0a0a0a00,
+ 0x36363600, 0x49494900, 0x2a2a2a00, 0x68686800,
+ 0x3c3c3c00, 0x38383800, 0xf1f1f100, 0xa4a4a400,
+ 0x40404000, 0x28282800, 0xd3d3d300, 0x7b7b7b00,
+ 0xbbbbbb00, 0xc9c9c900, 0x43434300, 0xc1c1c100,
+ 0x15151500, 0xe3e3e300, 0xadadad00, 0xf4f4f400,
+ 0x77777700, 0xc7c7c700, 0x80808000, 0x9e9e9e00,
};
static const u32 camellia_sp0222[256] = {
- 0x00e0e0e0,0x00050505,0x00585858,0x00d9d9d9,
- 0x00676767,0x004e4e4e,0x00818181,0x00cbcbcb,
- 0x00c9c9c9,0x000b0b0b,0x00aeaeae,0x006a6a6a,
- 0x00d5d5d5,0x00181818,0x005d5d5d,0x00828282,
- 0x00464646,0x00dfdfdf,0x00d6d6d6,0x00272727,
- 0x008a8a8a,0x00323232,0x004b4b4b,0x00424242,
- 0x00dbdbdb,0x001c1c1c,0x009e9e9e,0x009c9c9c,
- 0x003a3a3a,0x00cacaca,0x00252525,0x007b7b7b,
- 0x000d0d0d,0x00717171,0x005f5f5f,0x001f1f1f,
- 0x00f8f8f8,0x00d7d7d7,0x003e3e3e,0x009d9d9d,
- 0x007c7c7c,0x00606060,0x00b9b9b9,0x00bebebe,
- 0x00bcbcbc,0x008b8b8b,0x00161616,0x00343434,
- 0x004d4d4d,0x00c3c3c3,0x00727272,0x00959595,
- 0x00ababab,0x008e8e8e,0x00bababa,0x007a7a7a,
- 0x00b3b3b3,0x00020202,0x00b4b4b4,0x00adadad,
- 0x00a2a2a2,0x00acacac,0x00d8d8d8,0x009a9a9a,
- 0x00171717,0x001a1a1a,0x00353535,0x00cccccc,
- 0x00f7f7f7,0x00999999,0x00616161,0x005a5a5a,
- 0x00e8e8e8,0x00242424,0x00565656,0x00404040,
- 0x00e1e1e1,0x00636363,0x00090909,0x00333333,
- 0x00bfbfbf,0x00989898,0x00979797,0x00858585,
- 0x00686868,0x00fcfcfc,0x00ececec,0x000a0a0a,
- 0x00dadada,0x006f6f6f,0x00535353,0x00626262,
- 0x00a3a3a3,0x002e2e2e,0x00080808,0x00afafaf,
- 0x00282828,0x00b0b0b0,0x00747474,0x00c2c2c2,
- 0x00bdbdbd,0x00363636,0x00222222,0x00383838,
- 0x00646464,0x001e1e1e,0x00393939,0x002c2c2c,
- 0x00a6a6a6,0x00303030,0x00e5e5e5,0x00444444,
- 0x00fdfdfd,0x00888888,0x009f9f9f,0x00656565,
- 0x00878787,0x006b6b6b,0x00f4f4f4,0x00232323,
- 0x00484848,0x00101010,0x00d1d1d1,0x00515151,
- 0x00c0c0c0,0x00f9f9f9,0x00d2d2d2,0x00a0a0a0,
- 0x00555555,0x00a1a1a1,0x00414141,0x00fafafa,
- 0x00434343,0x00131313,0x00c4c4c4,0x002f2f2f,
- 0x00a8a8a8,0x00b6b6b6,0x003c3c3c,0x002b2b2b,
- 0x00c1c1c1,0x00ffffff,0x00c8c8c8,0x00a5a5a5,
- 0x00202020,0x00898989,0x00000000,0x00909090,
- 0x00474747,0x00efefef,0x00eaeaea,0x00b7b7b7,
- 0x00151515,0x00060606,0x00cdcdcd,0x00b5b5b5,
- 0x00121212,0x007e7e7e,0x00bbbbbb,0x00292929,
- 0x000f0f0f,0x00b8b8b8,0x00070707,0x00040404,
- 0x009b9b9b,0x00949494,0x00212121,0x00666666,
- 0x00e6e6e6,0x00cecece,0x00ededed,0x00e7e7e7,
- 0x003b3b3b,0x00fefefe,0x007f7f7f,0x00c5c5c5,
- 0x00a4a4a4,0x00373737,0x00b1b1b1,0x004c4c4c,
- 0x00919191,0x006e6e6e,0x008d8d8d,0x00767676,
- 0x00030303,0x002d2d2d,0x00dedede,0x00969696,
- 0x00262626,0x007d7d7d,0x00c6c6c6,0x005c5c5c,
- 0x00d3d3d3,0x00f2f2f2,0x004f4f4f,0x00191919,
- 0x003f3f3f,0x00dcdcdc,0x00797979,0x001d1d1d,
- 0x00525252,0x00ebebeb,0x00f3f3f3,0x006d6d6d,
- 0x005e5e5e,0x00fbfbfb,0x00696969,0x00b2b2b2,
- 0x00f0f0f0,0x00313131,0x000c0c0c,0x00d4d4d4,
- 0x00cfcfcf,0x008c8c8c,0x00e2e2e2,0x00757575,
- 0x00a9a9a9,0x004a4a4a,0x00575757,0x00848484,
- 0x00111111,0x00454545,0x001b1b1b,0x00f5f5f5,
- 0x00e4e4e4,0x000e0e0e,0x00737373,0x00aaaaaa,
- 0x00f1f1f1,0x00dddddd,0x00595959,0x00141414,
- 0x006c6c6c,0x00929292,0x00545454,0x00d0d0d0,
- 0x00787878,0x00707070,0x00e3e3e3,0x00494949,
- 0x00808080,0x00505050,0x00a7a7a7,0x00f6f6f6,
- 0x00777777,0x00939393,0x00868686,0x00838383,
- 0x002a2a2a,0x00c7c7c7,0x005b5b5b,0x00e9e9e9,
- 0x00eeeeee,0x008f8f8f,0x00010101,0x003d3d3d,
+ 0x00e0e0e0, 0x00050505, 0x00585858, 0x00d9d9d9,
+ 0x00676767, 0x004e4e4e, 0x00818181, 0x00cbcbcb,
+ 0x00c9c9c9, 0x000b0b0b, 0x00aeaeae, 0x006a6a6a,
+ 0x00d5d5d5, 0x00181818, 0x005d5d5d, 0x00828282,
+ 0x00464646, 0x00dfdfdf, 0x00d6d6d6, 0x00272727,
+ 0x008a8a8a, 0x00323232, 0x004b4b4b, 0x00424242,
+ 0x00dbdbdb, 0x001c1c1c, 0x009e9e9e, 0x009c9c9c,
+ 0x003a3a3a, 0x00cacaca, 0x00252525, 0x007b7b7b,
+ 0x000d0d0d, 0x00717171, 0x005f5f5f, 0x001f1f1f,
+ 0x00f8f8f8, 0x00d7d7d7, 0x003e3e3e, 0x009d9d9d,
+ 0x007c7c7c, 0x00606060, 0x00b9b9b9, 0x00bebebe,
+ 0x00bcbcbc, 0x008b8b8b, 0x00161616, 0x00343434,
+ 0x004d4d4d, 0x00c3c3c3, 0x00727272, 0x00959595,
+ 0x00ababab, 0x008e8e8e, 0x00bababa, 0x007a7a7a,
+ 0x00b3b3b3, 0x00020202, 0x00b4b4b4, 0x00adadad,
+ 0x00a2a2a2, 0x00acacac, 0x00d8d8d8, 0x009a9a9a,
+ 0x00171717, 0x001a1a1a, 0x00353535, 0x00cccccc,
+ 0x00f7f7f7, 0x00999999, 0x00616161, 0x005a5a5a,
+ 0x00e8e8e8, 0x00242424, 0x00565656, 0x00404040,
+ 0x00e1e1e1, 0x00636363, 0x00090909, 0x00333333,
+ 0x00bfbfbf, 0x00989898, 0x00979797, 0x00858585,
+ 0x00686868, 0x00fcfcfc, 0x00ececec, 0x000a0a0a,
+ 0x00dadada, 0x006f6f6f, 0x00535353, 0x00626262,
+ 0x00a3a3a3, 0x002e2e2e, 0x00080808, 0x00afafaf,
+ 0x00282828, 0x00b0b0b0, 0x00747474, 0x00c2c2c2,
+ 0x00bdbdbd, 0x00363636, 0x00222222, 0x00383838,
+ 0x00646464, 0x001e1e1e, 0x00393939, 0x002c2c2c,
+ 0x00a6a6a6, 0x00303030, 0x00e5e5e5, 0x00444444,
+ 0x00fdfdfd, 0x00888888, 0x009f9f9f, 0x00656565,
+ 0x00878787, 0x006b6b6b, 0x00f4f4f4, 0x00232323,
+ 0x00484848, 0x00101010, 0x00d1d1d1, 0x00515151,
+ 0x00c0c0c0, 0x00f9f9f9, 0x00d2d2d2, 0x00a0a0a0,
+ 0x00555555, 0x00a1a1a1, 0x00414141, 0x00fafafa,
+ 0x00434343, 0x00131313, 0x00c4c4c4, 0x002f2f2f,
+ 0x00a8a8a8, 0x00b6b6b6, 0x003c3c3c, 0x002b2b2b,
+ 0x00c1c1c1, 0x00ffffff, 0x00c8c8c8, 0x00a5a5a5,
+ 0x00202020, 0x00898989, 0x00000000, 0x00909090,
+ 0x00474747, 0x00efefef, 0x00eaeaea, 0x00b7b7b7,
+ 0x00151515, 0x00060606, 0x00cdcdcd, 0x00b5b5b5,
+ 0x00121212, 0x007e7e7e, 0x00bbbbbb, 0x00292929,
+ 0x000f0f0f, 0x00b8b8b8, 0x00070707, 0x00040404,
+ 0x009b9b9b, 0x00949494, 0x00212121, 0x00666666,
+ 0x00e6e6e6, 0x00cecece, 0x00ededed, 0x00e7e7e7,
+ 0x003b3b3b, 0x00fefefe, 0x007f7f7f, 0x00c5c5c5,
+ 0x00a4a4a4, 0x00373737, 0x00b1b1b1, 0x004c4c4c,
+ 0x00919191, 0x006e6e6e, 0x008d8d8d, 0x00767676,
+ 0x00030303, 0x002d2d2d, 0x00dedede, 0x00969696,
+ 0x00262626, 0x007d7d7d, 0x00c6c6c6, 0x005c5c5c,
+ 0x00d3d3d3, 0x00f2f2f2, 0x004f4f4f, 0x00191919,
+ 0x003f3f3f, 0x00dcdcdc, 0x00797979, 0x001d1d1d,
+ 0x00525252, 0x00ebebeb, 0x00f3f3f3, 0x006d6d6d,
+ 0x005e5e5e, 0x00fbfbfb, 0x00696969, 0x00b2b2b2,
+ 0x00f0f0f0, 0x00313131, 0x000c0c0c, 0x00d4d4d4,
+ 0x00cfcfcf, 0x008c8c8c, 0x00e2e2e2, 0x00757575,
+ 0x00a9a9a9, 0x004a4a4a, 0x00575757, 0x00848484,
+ 0x00111111, 0x00454545, 0x001b1b1b, 0x00f5f5f5,
+ 0x00e4e4e4, 0x000e0e0e, 0x00737373, 0x00aaaaaa,
+ 0x00f1f1f1, 0x00dddddd, 0x00595959, 0x00141414,
+ 0x006c6c6c, 0x00929292, 0x00545454, 0x00d0d0d0,
+ 0x00787878, 0x00707070, 0x00e3e3e3, 0x00494949,
+ 0x00808080, 0x00505050, 0x00a7a7a7, 0x00f6f6f6,
+ 0x00777777, 0x00939393, 0x00868686, 0x00838383,
+ 0x002a2a2a, 0x00c7c7c7, 0x005b5b5b, 0x00e9e9e9,
+ 0x00eeeeee, 0x008f8f8f, 0x00010101, 0x003d3d3d,
};
static const u32 camellia_sp3033[256] = {
- 0x38003838,0x41004141,0x16001616,0x76007676,
- 0xd900d9d9,0x93009393,0x60006060,0xf200f2f2,
- 0x72007272,0xc200c2c2,0xab00abab,0x9a009a9a,
- 0x75007575,0x06000606,0x57005757,0xa000a0a0,
- 0x91009191,0xf700f7f7,0xb500b5b5,0xc900c9c9,
- 0xa200a2a2,0x8c008c8c,0xd200d2d2,0x90009090,
- 0xf600f6f6,0x07000707,0xa700a7a7,0x27002727,
- 0x8e008e8e,0xb200b2b2,0x49004949,0xde00dede,
- 0x43004343,0x5c005c5c,0xd700d7d7,0xc700c7c7,
- 0x3e003e3e,0xf500f5f5,0x8f008f8f,0x67006767,
- 0x1f001f1f,0x18001818,0x6e006e6e,0xaf00afaf,
- 0x2f002f2f,0xe200e2e2,0x85008585,0x0d000d0d,
- 0x53005353,0xf000f0f0,0x9c009c9c,0x65006565,
- 0xea00eaea,0xa300a3a3,0xae00aeae,0x9e009e9e,
- 0xec00ecec,0x80008080,0x2d002d2d,0x6b006b6b,
- 0xa800a8a8,0x2b002b2b,0x36003636,0xa600a6a6,
- 0xc500c5c5,0x86008686,0x4d004d4d,0x33003333,
- 0xfd00fdfd,0x66006666,0x58005858,0x96009696,
- 0x3a003a3a,0x09000909,0x95009595,0x10001010,
- 0x78007878,0xd800d8d8,0x42004242,0xcc00cccc,
- 0xef00efef,0x26002626,0xe500e5e5,0x61006161,
- 0x1a001a1a,0x3f003f3f,0x3b003b3b,0x82008282,
- 0xb600b6b6,0xdb00dbdb,0xd400d4d4,0x98009898,
- 0xe800e8e8,0x8b008b8b,0x02000202,0xeb00ebeb,
- 0x0a000a0a,0x2c002c2c,0x1d001d1d,0xb000b0b0,
- 0x6f006f6f,0x8d008d8d,0x88008888,0x0e000e0e,
- 0x19001919,0x87008787,0x4e004e4e,0x0b000b0b,
- 0xa900a9a9,0x0c000c0c,0x79007979,0x11001111,
- 0x7f007f7f,0x22002222,0xe700e7e7,0x59005959,
- 0xe100e1e1,0xda00dada,0x3d003d3d,0xc800c8c8,
- 0x12001212,0x04000404,0x74007474,0x54005454,
- 0x30003030,0x7e007e7e,0xb400b4b4,0x28002828,
- 0x55005555,0x68006868,0x50005050,0xbe00bebe,
- 0xd000d0d0,0xc400c4c4,0x31003131,0xcb00cbcb,
- 0x2a002a2a,0xad00adad,0x0f000f0f,0xca00caca,
- 0x70007070,0xff00ffff,0x32003232,0x69006969,
- 0x08000808,0x62006262,0x00000000,0x24002424,
- 0xd100d1d1,0xfb00fbfb,0xba00baba,0xed00eded,
- 0x45004545,0x81008181,0x73007373,0x6d006d6d,
- 0x84008484,0x9f009f9f,0xee00eeee,0x4a004a4a,
- 0xc300c3c3,0x2e002e2e,0xc100c1c1,0x01000101,
- 0xe600e6e6,0x25002525,0x48004848,0x99009999,
- 0xb900b9b9,0xb300b3b3,0x7b007b7b,0xf900f9f9,
- 0xce00cece,0xbf00bfbf,0xdf00dfdf,0x71007171,
- 0x29002929,0xcd00cdcd,0x6c006c6c,0x13001313,
- 0x64006464,0x9b009b9b,0x63006363,0x9d009d9d,
- 0xc000c0c0,0x4b004b4b,0xb700b7b7,0xa500a5a5,
- 0x89008989,0x5f005f5f,0xb100b1b1,0x17001717,
- 0xf400f4f4,0xbc00bcbc,0xd300d3d3,0x46004646,
- 0xcf00cfcf,0x37003737,0x5e005e5e,0x47004747,
- 0x94009494,0xfa00fafa,0xfc00fcfc,0x5b005b5b,
- 0x97009797,0xfe00fefe,0x5a005a5a,0xac00acac,
- 0x3c003c3c,0x4c004c4c,0x03000303,0x35003535,
- 0xf300f3f3,0x23002323,0xb800b8b8,0x5d005d5d,
- 0x6a006a6a,0x92009292,0xd500d5d5,0x21002121,
- 0x44004444,0x51005151,0xc600c6c6,0x7d007d7d,
- 0x39003939,0x83008383,0xdc00dcdc,0xaa00aaaa,
- 0x7c007c7c,0x77007777,0x56005656,0x05000505,
- 0x1b001b1b,0xa400a4a4,0x15001515,0x34003434,
- 0x1e001e1e,0x1c001c1c,0xf800f8f8,0x52005252,
- 0x20002020,0x14001414,0xe900e9e9,0xbd00bdbd,
- 0xdd00dddd,0xe400e4e4,0xa100a1a1,0xe000e0e0,
- 0x8a008a8a,0xf100f1f1,0xd600d6d6,0x7a007a7a,
- 0xbb00bbbb,0xe300e3e3,0x40004040,0x4f004f4f,
+ 0x38003838, 0x41004141, 0x16001616, 0x76007676,
+ 0xd900d9d9, 0x93009393, 0x60006060, 0xf200f2f2,
+ 0x72007272, 0xc200c2c2, 0xab00abab, 0x9a009a9a,
+ 0x75007575, 0x06000606, 0x57005757, 0xa000a0a0,
+ 0x91009191, 0xf700f7f7, 0xb500b5b5, 0xc900c9c9,
+ 0xa200a2a2, 0x8c008c8c, 0xd200d2d2, 0x90009090,
+ 0xf600f6f6, 0x07000707, 0xa700a7a7, 0x27002727,
+ 0x8e008e8e, 0xb200b2b2, 0x49004949, 0xde00dede,
+ 0x43004343, 0x5c005c5c, 0xd700d7d7, 0xc700c7c7,
+ 0x3e003e3e, 0xf500f5f5, 0x8f008f8f, 0x67006767,
+ 0x1f001f1f, 0x18001818, 0x6e006e6e, 0xaf00afaf,
+ 0x2f002f2f, 0xe200e2e2, 0x85008585, 0x0d000d0d,
+ 0x53005353, 0xf000f0f0, 0x9c009c9c, 0x65006565,
+ 0xea00eaea, 0xa300a3a3, 0xae00aeae, 0x9e009e9e,
+ 0xec00ecec, 0x80008080, 0x2d002d2d, 0x6b006b6b,
+ 0xa800a8a8, 0x2b002b2b, 0x36003636, 0xa600a6a6,
+ 0xc500c5c5, 0x86008686, 0x4d004d4d, 0x33003333,
+ 0xfd00fdfd, 0x66006666, 0x58005858, 0x96009696,
+ 0x3a003a3a, 0x09000909, 0x95009595, 0x10001010,
+ 0x78007878, 0xd800d8d8, 0x42004242, 0xcc00cccc,
+ 0xef00efef, 0x26002626, 0xe500e5e5, 0x61006161,
+ 0x1a001a1a, 0x3f003f3f, 0x3b003b3b, 0x82008282,
+ 0xb600b6b6, 0xdb00dbdb, 0xd400d4d4, 0x98009898,
+ 0xe800e8e8, 0x8b008b8b, 0x02000202, 0xeb00ebeb,
+ 0x0a000a0a, 0x2c002c2c, 0x1d001d1d, 0xb000b0b0,
+ 0x6f006f6f, 0x8d008d8d, 0x88008888, 0x0e000e0e,
+ 0x19001919, 0x87008787, 0x4e004e4e, 0x0b000b0b,
+ 0xa900a9a9, 0x0c000c0c, 0x79007979, 0x11001111,
+ 0x7f007f7f, 0x22002222, 0xe700e7e7, 0x59005959,
+ 0xe100e1e1, 0xda00dada, 0x3d003d3d, 0xc800c8c8,
+ 0x12001212, 0x04000404, 0x74007474, 0x54005454,
+ 0x30003030, 0x7e007e7e, 0xb400b4b4, 0x28002828,
+ 0x55005555, 0x68006868, 0x50005050, 0xbe00bebe,
+ 0xd000d0d0, 0xc400c4c4, 0x31003131, 0xcb00cbcb,
+ 0x2a002a2a, 0xad00adad, 0x0f000f0f, 0xca00caca,
+ 0x70007070, 0xff00ffff, 0x32003232, 0x69006969,
+ 0x08000808, 0x62006262, 0x00000000, 0x24002424,
+ 0xd100d1d1, 0xfb00fbfb, 0xba00baba, 0xed00eded,
+ 0x45004545, 0x81008181, 0x73007373, 0x6d006d6d,
+ 0x84008484, 0x9f009f9f, 0xee00eeee, 0x4a004a4a,
+ 0xc300c3c3, 0x2e002e2e, 0xc100c1c1, 0x01000101,
+ 0xe600e6e6, 0x25002525, 0x48004848, 0x99009999,
+ 0xb900b9b9, 0xb300b3b3, 0x7b007b7b, 0xf900f9f9,
+ 0xce00cece, 0xbf00bfbf, 0xdf00dfdf, 0x71007171,
+ 0x29002929, 0xcd00cdcd, 0x6c006c6c, 0x13001313,
+ 0x64006464, 0x9b009b9b, 0x63006363, 0x9d009d9d,
+ 0xc000c0c0, 0x4b004b4b, 0xb700b7b7, 0xa500a5a5,
+ 0x89008989, 0x5f005f5f, 0xb100b1b1, 0x17001717,
+ 0xf400f4f4, 0xbc00bcbc, 0xd300d3d3, 0x46004646,
+ 0xcf00cfcf, 0x37003737, 0x5e005e5e, 0x47004747,
+ 0x94009494, 0xfa00fafa, 0xfc00fcfc, 0x5b005b5b,
+ 0x97009797, 0xfe00fefe, 0x5a005a5a, 0xac00acac,
+ 0x3c003c3c, 0x4c004c4c, 0x03000303, 0x35003535,
+ 0xf300f3f3, 0x23002323, 0xb800b8b8, 0x5d005d5d,
+ 0x6a006a6a, 0x92009292, 0xd500d5d5, 0x21002121,
+ 0x44004444, 0x51005151, 0xc600c6c6, 0x7d007d7d,
+ 0x39003939, 0x83008383, 0xdc00dcdc, 0xaa00aaaa,
+ 0x7c007c7c, 0x77007777, 0x56005656, 0x05000505,
+ 0x1b001b1b, 0xa400a4a4, 0x15001515, 0x34003434,
+ 0x1e001e1e, 0x1c001c1c, 0xf800f8f8, 0x52005252,
+ 0x20002020, 0x14001414, 0xe900e9e9, 0xbd00bdbd,
+ 0xdd00dddd, 0xe400e4e4, 0xa100a1a1, 0xe000e0e0,
+ 0x8a008a8a, 0xf100f1f1, 0xd600d6d6, 0x7a007a7a,
+ 0xbb00bbbb, 0xe300e3e3, 0x40004040, 0x4f004f4f,
};
static const u32 camellia_sp4404[256] = {
- 0x70700070,0x2c2c002c,0xb3b300b3,0xc0c000c0,
- 0xe4e400e4,0x57570057,0xeaea00ea,0xaeae00ae,
- 0x23230023,0x6b6b006b,0x45450045,0xa5a500a5,
- 0xeded00ed,0x4f4f004f,0x1d1d001d,0x92920092,
- 0x86860086,0xafaf00af,0x7c7c007c,0x1f1f001f,
- 0x3e3e003e,0xdcdc00dc,0x5e5e005e,0x0b0b000b,
- 0xa6a600a6,0x39390039,0xd5d500d5,0x5d5d005d,
- 0xd9d900d9,0x5a5a005a,0x51510051,0x6c6c006c,
- 0x8b8b008b,0x9a9a009a,0xfbfb00fb,0xb0b000b0,
- 0x74740074,0x2b2b002b,0xf0f000f0,0x84840084,
- 0xdfdf00df,0xcbcb00cb,0x34340034,0x76760076,
- 0x6d6d006d,0xa9a900a9,0xd1d100d1,0x04040004,
- 0x14140014,0x3a3a003a,0xdede00de,0x11110011,
- 0x32320032,0x9c9c009c,0x53530053,0xf2f200f2,
- 0xfefe00fe,0xcfcf00cf,0xc3c300c3,0x7a7a007a,
- 0x24240024,0xe8e800e8,0x60600060,0x69690069,
- 0xaaaa00aa,0xa0a000a0,0xa1a100a1,0x62620062,
- 0x54540054,0x1e1e001e,0xe0e000e0,0x64640064,
- 0x10100010,0x00000000,0xa3a300a3,0x75750075,
- 0x8a8a008a,0xe6e600e6,0x09090009,0xdddd00dd,
- 0x87870087,0x83830083,0xcdcd00cd,0x90900090,
- 0x73730073,0xf6f600f6,0x9d9d009d,0xbfbf00bf,
- 0x52520052,0xd8d800d8,0xc8c800c8,0xc6c600c6,
- 0x81810081,0x6f6f006f,0x13130013,0x63630063,
- 0xe9e900e9,0xa7a700a7,0x9f9f009f,0xbcbc00bc,
- 0x29290029,0xf9f900f9,0x2f2f002f,0xb4b400b4,
- 0x78780078,0x06060006,0xe7e700e7,0x71710071,
- 0xd4d400d4,0xabab00ab,0x88880088,0x8d8d008d,
- 0x72720072,0xb9b900b9,0xf8f800f8,0xacac00ac,
- 0x36360036,0x2a2a002a,0x3c3c003c,0xf1f100f1,
- 0x40400040,0xd3d300d3,0xbbbb00bb,0x43430043,
- 0x15150015,0xadad00ad,0x77770077,0x80800080,
- 0x82820082,0xecec00ec,0x27270027,0xe5e500e5,
- 0x85850085,0x35350035,0x0c0c000c,0x41410041,
- 0xefef00ef,0x93930093,0x19190019,0x21210021,
- 0x0e0e000e,0x4e4e004e,0x65650065,0xbdbd00bd,
- 0xb8b800b8,0x8f8f008f,0xebeb00eb,0xcece00ce,
- 0x30300030,0x5f5f005f,0xc5c500c5,0x1a1a001a,
- 0xe1e100e1,0xcaca00ca,0x47470047,0x3d3d003d,
- 0x01010001,0xd6d600d6,0x56560056,0x4d4d004d,
- 0x0d0d000d,0x66660066,0xcccc00cc,0x2d2d002d,
- 0x12120012,0x20200020,0xb1b100b1,0x99990099,
- 0x4c4c004c,0xc2c200c2,0x7e7e007e,0x05050005,
- 0xb7b700b7,0x31310031,0x17170017,0xd7d700d7,
- 0x58580058,0x61610061,0x1b1b001b,0x1c1c001c,
- 0x0f0f000f,0x16160016,0x18180018,0x22220022,
- 0x44440044,0xb2b200b2,0xb5b500b5,0x91910091,
- 0x08080008,0xa8a800a8,0xfcfc00fc,0x50500050,
- 0xd0d000d0,0x7d7d007d,0x89890089,0x97970097,
- 0x5b5b005b,0x95950095,0xffff00ff,0xd2d200d2,
- 0xc4c400c4,0x48480048,0xf7f700f7,0xdbdb00db,
- 0x03030003,0xdada00da,0x3f3f003f,0x94940094,
- 0x5c5c005c,0x02020002,0x4a4a004a,0x33330033,
- 0x67670067,0xf3f300f3,0x7f7f007f,0xe2e200e2,
- 0x9b9b009b,0x26260026,0x37370037,0x3b3b003b,
- 0x96960096,0x4b4b004b,0xbebe00be,0x2e2e002e,
- 0x79790079,0x8c8c008c,0x6e6e006e,0x8e8e008e,
- 0xf5f500f5,0xb6b600b6,0xfdfd00fd,0x59590059,
- 0x98980098,0x6a6a006a,0x46460046,0xbaba00ba,
- 0x25250025,0x42420042,0xa2a200a2,0xfafa00fa,
- 0x07070007,0x55550055,0xeeee00ee,0x0a0a000a,
- 0x49490049,0x68680068,0x38380038,0xa4a400a4,
- 0x28280028,0x7b7b007b,0xc9c900c9,0xc1c100c1,
- 0xe3e300e3,0xf4f400f4,0xc7c700c7,0x9e9e009e,
+ 0x70700070, 0x2c2c002c, 0xb3b300b3, 0xc0c000c0,
+ 0xe4e400e4, 0x57570057, 0xeaea00ea, 0xaeae00ae,
+ 0x23230023, 0x6b6b006b, 0x45450045, 0xa5a500a5,
+ 0xeded00ed, 0x4f4f004f, 0x1d1d001d, 0x92920092,
+ 0x86860086, 0xafaf00af, 0x7c7c007c, 0x1f1f001f,
+ 0x3e3e003e, 0xdcdc00dc, 0x5e5e005e, 0x0b0b000b,
+ 0xa6a600a6, 0x39390039, 0xd5d500d5, 0x5d5d005d,
+ 0xd9d900d9, 0x5a5a005a, 0x51510051, 0x6c6c006c,
+ 0x8b8b008b, 0x9a9a009a, 0xfbfb00fb, 0xb0b000b0,
+ 0x74740074, 0x2b2b002b, 0xf0f000f0, 0x84840084,
+ 0xdfdf00df, 0xcbcb00cb, 0x34340034, 0x76760076,
+ 0x6d6d006d, 0xa9a900a9, 0xd1d100d1, 0x04040004,
+ 0x14140014, 0x3a3a003a, 0xdede00de, 0x11110011,
+ 0x32320032, 0x9c9c009c, 0x53530053, 0xf2f200f2,
+ 0xfefe00fe, 0xcfcf00cf, 0xc3c300c3, 0x7a7a007a,
+ 0x24240024, 0xe8e800e8, 0x60600060, 0x69690069,
+ 0xaaaa00aa, 0xa0a000a0, 0xa1a100a1, 0x62620062,
+ 0x54540054, 0x1e1e001e, 0xe0e000e0, 0x64640064,
+ 0x10100010, 0x00000000, 0xa3a300a3, 0x75750075,
+ 0x8a8a008a, 0xe6e600e6, 0x09090009, 0xdddd00dd,
+ 0x87870087, 0x83830083, 0xcdcd00cd, 0x90900090,
+ 0x73730073, 0xf6f600f6, 0x9d9d009d, 0xbfbf00bf,
+ 0x52520052, 0xd8d800d8, 0xc8c800c8, 0xc6c600c6,
+ 0x81810081, 0x6f6f006f, 0x13130013, 0x63630063,
+ 0xe9e900e9, 0xa7a700a7, 0x9f9f009f, 0xbcbc00bc,
+ 0x29290029, 0xf9f900f9, 0x2f2f002f, 0xb4b400b4,
+ 0x78780078, 0x06060006, 0xe7e700e7, 0x71710071,
+ 0xd4d400d4, 0xabab00ab, 0x88880088, 0x8d8d008d,
+ 0x72720072, 0xb9b900b9, 0xf8f800f8, 0xacac00ac,
+ 0x36360036, 0x2a2a002a, 0x3c3c003c, 0xf1f100f1,
+ 0x40400040, 0xd3d300d3, 0xbbbb00bb, 0x43430043,
+ 0x15150015, 0xadad00ad, 0x77770077, 0x80800080,
+ 0x82820082, 0xecec00ec, 0x27270027, 0xe5e500e5,
+ 0x85850085, 0x35350035, 0x0c0c000c, 0x41410041,
+ 0xefef00ef, 0x93930093, 0x19190019, 0x21210021,
+ 0x0e0e000e, 0x4e4e004e, 0x65650065, 0xbdbd00bd,
+ 0xb8b800b8, 0x8f8f008f, 0xebeb00eb, 0xcece00ce,
+ 0x30300030, 0x5f5f005f, 0xc5c500c5, 0x1a1a001a,
+ 0xe1e100e1, 0xcaca00ca, 0x47470047, 0x3d3d003d,
+ 0x01010001, 0xd6d600d6, 0x56560056, 0x4d4d004d,
+ 0x0d0d000d, 0x66660066, 0xcccc00cc, 0x2d2d002d,
+ 0x12120012, 0x20200020, 0xb1b100b1, 0x99990099,
+ 0x4c4c004c, 0xc2c200c2, 0x7e7e007e, 0x05050005,
+ 0xb7b700b7, 0x31310031, 0x17170017, 0xd7d700d7,
+ 0x58580058, 0x61610061, 0x1b1b001b, 0x1c1c001c,
+ 0x0f0f000f, 0x16160016, 0x18180018, 0x22220022,
+ 0x44440044, 0xb2b200b2, 0xb5b500b5, 0x91910091,
+ 0x08080008, 0xa8a800a8, 0xfcfc00fc, 0x50500050,
+ 0xd0d000d0, 0x7d7d007d, 0x89890089, 0x97970097,
+ 0x5b5b005b, 0x95950095, 0xffff00ff, 0xd2d200d2,
+ 0xc4c400c4, 0x48480048, 0xf7f700f7, 0xdbdb00db,
+ 0x03030003, 0xdada00da, 0x3f3f003f, 0x94940094,
+ 0x5c5c005c, 0x02020002, 0x4a4a004a, 0x33330033,
+ 0x67670067, 0xf3f300f3, 0x7f7f007f, 0xe2e200e2,
+ 0x9b9b009b, 0x26260026, 0x37370037, 0x3b3b003b,
+ 0x96960096, 0x4b4b004b, 0xbebe00be, 0x2e2e002e,
+ 0x79790079, 0x8c8c008c, 0x6e6e006e, 0x8e8e008e,
+ 0xf5f500f5, 0xb6b600b6, 0xfdfd00fd, 0x59590059,
+ 0x98980098, 0x6a6a006a, 0x46460046, 0xbaba00ba,
+ 0x25250025, 0x42420042, 0xa2a200a2, 0xfafa00fa,
+ 0x07070007, 0x55550055, 0xeeee00ee, 0x0a0a000a,
+ 0x49490049, 0x68680068, 0x38380038, 0xa4a400a4,
+ 0x28280028, 0x7b7b007b, 0xc9c900c9, 0xc1c100c1,
+ 0xe3e300e3, 0xf4f400f4, 0xc7c700c7, 0x9e9e009e,
};
@@ -337,43 +337,40 @@ static const u32 camellia_sp4404[256] = {
/*
* macros
*/
-#define ROLDQ(ll, lr, rl, rr, w0, w1, bits) \
- do { \
+#define ROLDQ(ll, lr, rl, rr, w0, w1, bits) ({ \
w0 = ll; \
ll = (ll << bits) + (lr >> (32 - bits)); \
lr = (lr << bits) + (rl >> (32 - bits)); \
rl = (rl << bits) + (rr >> (32 - bits)); \
rr = (rr << bits) + (w0 >> (32 - bits)); \
- } while(0)
+})
-#define ROLDQo32(ll, lr, rl, rr, w0, w1, bits) \
- do { \
+#define ROLDQo32(ll, lr, rl, rr, w0, w1, bits) ({ \
w0 = ll; \
w1 = lr; \
ll = (lr << (bits - 32)) + (rl >> (64 - bits)); \
lr = (rl << (bits - 32)) + (rr >> (64 - bits)); \
rl = (rr << (bits - 32)) + (w0 >> (64 - bits)); \
rr = (w0 << (bits - 32)) + (w1 >> (64 - bits)); \
- } while(0)
+})
-#define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) \
- do { \
+#define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) ({ \
il = xl ^ kl; \
ir = xr ^ kr; \
t0 = il >> 16; \
t1 = ir >> 16; \
- yl = camellia_sp1110[(u8)(ir )] \
- ^ camellia_sp0222[ (t1 >> 8)] \
- ^ camellia_sp3033[(u8)(t1 )] \
+ yl = camellia_sp1110[(u8)(ir)] \
+ ^ camellia_sp0222[(u8)(t1 >> 8)] \
+ ^ camellia_sp3033[(u8)(t1)] \
^ camellia_sp4404[(u8)(ir >> 8)]; \
- yr = camellia_sp1110[ (t0 >> 8)] \
- ^ camellia_sp0222[(u8)(t0 )] \
+ yr = camellia_sp1110[(u8)(t0 >> 8)] \
+ ^ camellia_sp0222[(u8)(t0)] \
^ camellia_sp3033[(u8)(il >> 8)] \
- ^ camellia_sp4404[(u8)(il )]; \
+ ^ camellia_sp4404[(u8)(il)]; \
yl ^= yr; \
yr = ror32(yr, 8); \
yr ^= yl; \
- } while(0)
+})
#define SUBKEY_L(INDEX) (subkey[(INDEX)*2])
#define SUBKEY_R(INDEX) (subkey[(INDEX)*2 + 1])
@@ -382,7 +379,6 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
{
u32 dw, tl, tr;
u32 kw4l, kw4r;
- int i;
/* absorb kw2 to other subkeys */
/* round 2 */
@@ -392,8 +388,8 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
/* round 6 */
subL[7] ^= subL[1]; subR[7] ^= subR[1];
subL[1] ^= subR[1] & ~subR[9];
- dw = subL[1] & subL[9],
- subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl2) */
+ dw = subL[1] & subL[9];
+ subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl2) */
/* round 8 */
subL[11] ^= subL[1]; subR[11] ^= subR[1];
/* round 10 */
@@ -401,8 +397,8 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
/* round 12 */
subL[15] ^= subL[1]; subR[15] ^= subR[1];
subL[1] ^= subR[1] & ~subR[17];
- dw = subL[1] & subL[17],
- subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl4) */
+ dw = subL[1] & subL[17];
+ subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl4) */
/* round 14 */
subL[19] ^= subL[1]; subR[19] ^= subR[1];
/* round 16 */
@@ -417,8 +413,8 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
kw4l = subL[25]; kw4r = subR[25];
} else {
subL[1] ^= subR[1] & ~subR[25];
- dw = subL[1] & subL[25],
- subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl6) */
+ dw = subL[1] & subL[25];
+ subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl6) */
/* round 20 */
subL[27] ^= subL[1]; subR[27] ^= subR[1];
/* round 22 */
@@ -437,8 +433,8 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
/* round 19 */
subL[26] ^= kw4l; subR[26] ^= kw4r;
kw4l ^= kw4r & ~subR[24];
- dw = kw4l & subL[24],
- kw4r ^= rol32(dw, 1); /* modified for FL(kl5) */
+ dw = kw4l & subL[24];
+ kw4r ^= rol32(dw, 1); /* modified for FL(kl5) */
}
/* round 17 */
subL[22] ^= kw4l; subR[22] ^= kw4r;
@@ -447,8 +443,8 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
/* round 13 */
subL[18] ^= kw4l; subR[18] ^= kw4r;
kw4l ^= kw4r & ~subR[16];
- dw = kw4l & subL[16],
- kw4r ^= rol32(dw, 1); /* modified for FL(kl3) */
+ dw = kw4l & subL[16];
+ kw4r ^= rol32(dw, 1); /* modified for FL(kl3) */
/* round 11 */
subL[14] ^= kw4l; subR[14] ^= kw4r;
/* round 9 */
@@ -456,8 +452,8 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
/* round 7 */
subL[10] ^= kw4l; subR[10] ^= kw4r;
kw4l ^= kw4r & ~subR[8];
- dw = kw4l & subL[8],
- kw4r ^= rol32(dw, 1); /* modified for FL(kl1) */
+ dw = kw4l & subL[8];
+ kw4r ^= rol32(dw, 1); /* modified for FL(kl1) */
/* round 5 */
subL[6] ^= kw4l; subR[6] ^= kw4r;
/* round 3 */
@@ -481,8 +477,8 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_L(6) = subL[5] ^ subL[7]; /* round 5 */
SUBKEY_R(6) = subR[5] ^ subR[7];
tl = subL[10] ^ (subR[10] & ~subR[8]);
- dw = tl & subL[8], /* FL(kl1) */
- tr = subR[10] ^ rol32(dw, 1);
+ dw = tl & subL[8]; /* FL(kl1) */
+ tr = subR[10] ^ rol32(dw, 1);
SUBKEY_L(7) = subL[6] ^ tl; /* round 6 */
SUBKEY_R(7) = subR[6] ^ tr;
SUBKEY_L(8) = subL[8]; /* FL(kl1) */
@@ -490,8 +486,8 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_L(9) = subL[9]; /* FLinv(kl2) */
SUBKEY_R(9) = subR[9];
tl = subL[7] ^ (subR[7] & ~subR[9]);
- dw = tl & subL[9], /* FLinv(kl2) */
- tr = subR[7] ^ rol32(dw, 1);
+ dw = tl & subL[9]; /* FLinv(kl2) */
+ tr = subR[7] ^ rol32(dw, 1);
SUBKEY_L(10) = tl ^ subL[11]; /* round 7 */
SUBKEY_R(10) = tr ^ subR[11];
SUBKEY_L(11) = subL[10] ^ subL[12]; /* round 8 */
@@ -503,8 +499,8 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_L(14) = subL[13] ^ subL[15]; /* round 11 */
SUBKEY_R(14) = subR[13] ^ subR[15];
tl = subL[18] ^ (subR[18] & ~subR[16]);
- dw = tl & subL[16], /* FL(kl3) */
- tr = subR[18] ^ rol32(dw, 1);
+ dw = tl & subL[16]; /* FL(kl3) */
+ tr = subR[18] ^ rol32(dw, 1);
SUBKEY_L(15) = subL[14] ^ tl; /* round 12 */
SUBKEY_R(15) = subR[14] ^ tr;
SUBKEY_L(16) = subL[16]; /* FL(kl3) */
@@ -512,8 +508,8 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_L(17) = subL[17]; /* FLinv(kl4) */
SUBKEY_R(17) = subR[17];
tl = subL[15] ^ (subR[15] & ~subR[17]);
- dw = tl & subL[17], /* FLinv(kl4) */
- tr = subR[15] ^ rol32(dw, 1);
+ dw = tl & subL[17]; /* FLinv(kl4) */
+ tr = subR[15] ^ rol32(dw, 1);
SUBKEY_L(18) = tl ^ subL[19]; /* round 13 */
SUBKEY_R(18) = tr ^ subR[19];
SUBKEY_L(19) = subL[18] ^ subL[20]; /* round 14 */
@@ -531,8 +527,8 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_R(24) = subR[24] ^ subR[23];
} else {
tl = subL[26] ^ (subR[26] & ~subR[24]);
- dw = tl & subL[24], /* FL(kl5) */
- tr = subR[26] ^ rol32(dw, 1);
+ dw = tl & subL[24]; /* FL(kl5) */
+ tr = subR[26] ^ rol32(dw, 1);
SUBKEY_L(23) = subL[22] ^ tl; /* round 18 */
SUBKEY_R(23) = subR[22] ^ tr;
SUBKEY_L(24) = subL[24]; /* FL(kl5) */
@@ -540,8 +536,8 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_L(25) = subL[25]; /* FLinv(kl6) */
SUBKEY_R(25) = subR[25];
tl = subL[23] ^ (subR[23] & ~subR[25]);
- dw = tl & subL[25], /* FLinv(kl6) */
- tr = subR[23] ^ rol32(dw, 1);
+ dw = tl & subL[25]; /* FLinv(kl6) */
+ tr = subR[23] ^ rol32(dw, 1);
SUBKEY_L(26) = tl ^ subL[27]; /* round 19 */
SUBKEY_R(26) = tr ^ subR[27];
SUBKEY_L(27) = subL[26] ^ subL[28]; /* round 20 */
@@ -557,24 +553,6 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_L(32) = subL[32] ^ subL[31]; /* kw3 */
SUBKEY_R(32) = subR[32] ^ subR[31];
}
-
- /* apply the inverse of the last half of P-function */
- i = 2;
- do {
- dw = SUBKEY_L(i + 0) ^ SUBKEY_R(i + 0); dw = rol32(dw, 8);/* round 1 */
- SUBKEY_R(i + 0) = SUBKEY_L(i + 0) ^ dw; SUBKEY_L(i + 0) = dw;
- dw = SUBKEY_L(i + 1) ^ SUBKEY_R(i + 1); dw = rol32(dw, 8);/* round 2 */
- SUBKEY_R(i + 1) = SUBKEY_L(i + 1) ^ dw; SUBKEY_L(i + 1) = dw;
- dw = SUBKEY_L(i + 2) ^ SUBKEY_R(i + 2); dw = rol32(dw, 8);/* round 3 */
- SUBKEY_R(i + 2) = SUBKEY_L(i + 2) ^ dw; SUBKEY_L(i + 2) = dw;
- dw = SUBKEY_L(i + 3) ^ SUBKEY_R(i + 3); dw = rol32(dw, 8);/* round 4 */
- SUBKEY_R(i + 3) = SUBKEY_L(i + 3) ^ dw; SUBKEY_L(i + 3) = dw;
- dw = SUBKEY_L(i + 4) ^ SUBKEY_R(i + 4); dw = rol32(dw, 8);/* round 5 */
- SUBKEY_R(i + 4) = SUBKEY_L(i + 4) ^ dw; SUBKEY_L(i + 4) = dw;
- dw = SUBKEY_L(i + 5) ^ SUBKEY_R(i + 5); dw = rol32(dw, 8);/* round 6 */
- SUBKEY_R(i + 5) = SUBKEY_L(i + 5) ^ dw; SUBKEY_L(i + 5) = dw;
- i += 8;
- } while (i < max);
}
static void camellia_setup128(const unsigned char *key, u32 *subkey)
@@ -835,7 +813,7 @@ static void camellia_setup256(const unsigned char *key, u32 *subkey)
static void camellia_setup192(const unsigned char *key, u32 *subkey)
{
unsigned char kk[32];
- u32 krll, krlr, krrl,krrr;
+ u32 krll, krlr, krrl, krrr;
memcpy(kk, key, 24);
memcpy((unsigned char *)&krll, key+16, 4);
@@ -851,8 +829,7 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey)
/*
* Encrypt/decrypt
*/
-#define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) \
- do { \
+#define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) ({ \
t0 = kll; \
t2 = krr; \
t0 &= ll; \
@@ -865,60 +842,60 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey)
t1 |= lr; \
ll ^= t1; \
rr ^= rol32(t3, 1); \
- } while(0)
+})
-#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir) \
- do { \
+#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir) ({ \
+ yl ^= kl; \
+ yr ^= kr; \
ir = camellia_sp1110[(u8)xr]; \
- il = camellia_sp1110[ (xl >> 24)]; \
- ir ^= camellia_sp0222[ (xr >> 24)]; \
+ il = camellia_sp1110[(u8)(xl >> 24)]; \
+ ir ^= camellia_sp0222[(u8)(xr >> 24)]; \
il ^= camellia_sp0222[(u8)(xl >> 16)]; \
ir ^= camellia_sp3033[(u8)(xr >> 16)]; \
il ^= camellia_sp3033[(u8)(xl >> 8)]; \
ir ^= camellia_sp4404[(u8)(xr >> 8)]; \
il ^= camellia_sp4404[(u8)xl]; \
- il ^= kl; \
- ir ^= il ^ kr; \
+ ir ^= il; \
yl ^= ir; \
- yr ^= ror32(il, 8) ^ ir; \
- } while(0)
+ yr ^= ror32(il, 8) ^ ir; \
+})
/* max = 24: 128bit encrypt, max = 32: 256bit encrypt */
static void camellia_do_encrypt(const u32 *subkey, u32 *io, unsigned max)
{
- u32 il,ir,t0,t1; /* temporary variables */
+ u32 il, ir, t0, t1; /* temporary variables */
/* pre whitening but absorb kw2 */
io[0] ^= SUBKEY_L(0);
io[1] ^= SUBKEY_R(0);
/* main iteration */
-#define ROUNDS(i) do { \
- CAMELLIA_ROUNDSM(io[0],io[1], \
- SUBKEY_L(i + 2),SUBKEY_R(i + 2), \
- io[2],io[3],il,ir); \
- CAMELLIA_ROUNDSM(io[2],io[3], \
- SUBKEY_L(i + 3),SUBKEY_R(i + 3), \
- io[0],io[1],il,ir); \
- CAMELLIA_ROUNDSM(io[0],io[1], \
- SUBKEY_L(i + 4),SUBKEY_R(i + 4), \
- io[2],io[3],il,ir); \
- CAMELLIA_ROUNDSM(io[2],io[3], \
- SUBKEY_L(i + 5),SUBKEY_R(i + 5), \
- io[0],io[1],il,ir); \
- CAMELLIA_ROUNDSM(io[0],io[1], \
- SUBKEY_L(i + 6),SUBKEY_R(i + 6), \
- io[2],io[3],il,ir); \
- CAMELLIA_ROUNDSM(io[2],io[3], \
- SUBKEY_L(i + 7),SUBKEY_R(i + 7), \
- io[0],io[1],il,ir); \
-} while (0)
-#define FLS(i) do { \
- CAMELLIA_FLS(io[0],io[1],io[2],io[3], \
- SUBKEY_L(i + 0),SUBKEY_R(i + 0), \
- SUBKEY_L(i + 1),SUBKEY_R(i + 1), \
- t0,t1,il,ir); \
-} while (0)
+#define ROUNDS(i) ({ \
+ CAMELLIA_ROUNDSM(io[0], io[1], \
+ SUBKEY_L(i + 2), SUBKEY_R(i + 2), \
+ io[2], io[3], il, ir); \
+ CAMELLIA_ROUNDSM(io[2], io[3], \
+ SUBKEY_L(i + 3), SUBKEY_R(i + 3), \
+ io[0], io[1], il, ir); \
+ CAMELLIA_ROUNDSM(io[0], io[1], \
+ SUBKEY_L(i + 4), SUBKEY_R(i + 4), \
+ io[2], io[3], il, ir); \
+ CAMELLIA_ROUNDSM(io[2], io[3], \
+ SUBKEY_L(i + 5), SUBKEY_R(i + 5), \
+ io[0], io[1], il, ir); \
+ CAMELLIA_ROUNDSM(io[0], io[1], \
+ SUBKEY_L(i + 6), SUBKEY_R(i + 6), \
+ io[2], io[3], il, ir); \
+ CAMELLIA_ROUNDSM(io[2], io[3], \
+ SUBKEY_L(i + 7), SUBKEY_R(i + 7), \
+ io[0], io[1], il, ir); \
+})
+#define FLS(i) ({ \
+ CAMELLIA_FLS(io[0], io[1], io[2], io[3], \
+ SUBKEY_L(i + 0), SUBKEY_R(i + 0), \
+ SUBKEY_L(i + 1), SUBKEY_R(i + 1), \
+ t0, t1, il, ir); \
+})
ROUNDS(0);
FLS(8);
@@ -941,39 +918,39 @@ static void camellia_do_encrypt(const u32 *subkey, u32 *io, unsigned max)
static void camellia_do_decrypt(const u32 *subkey, u32 *io, unsigned i)
{
- u32 il,ir,t0,t1; /* temporary variables */
+ u32 il, ir, t0, t1; /* temporary variables */
/* pre whitening but absorb kw2 */
io[0] ^= SUBKEY_L(i);
io[1] ^= SUBKEY_R(i);
/* main iteration */
-#define ROUNDS(i) do { \
- CAMELLIA_ROUNDSM(io[0],io[1], \
- SUBKEY_L(i + 7),SUBKEY_R(i + 7), \
- io[2],io[3],il,ir); \
- CAMELLIA_ROUNDSM(io[2],io[3], \
- SUBKEY_L(i + 6),SUBKEY_R(i + 6), \
- io[0],io[1],il,ir); \
- CAMELLIA_ROUNDSM(io[0],io[1], \
- SUBKEY_L(i + 5),SUBKEY_R(i + 5), \
- io[2],io[3],il,ir); \
- CAMELLIA_ROUNDSM(io[2],io[3], \
- SUBKEY_L(i + 4),SUBKEY_R(i + 4), \
- io[0],io[1],il,ir); \
- CAMELLIA_ROUNDSM(io[0],io[1], \
- SUBKEY_L(i + 3),SUBKEY_R(i + 3), \
- io[2],io[3],il,ir); \
- CAMELLIA_ROUNDSM(io[2],io[3], \
- SUBKEY_L(i + 2),SUBKEY_R(i + 2), \
- io[0],io[1],il,ir); \
-} while (0)
-#define FLS(i) do { \
- CAMELLIA_FLS(io[0],io[1],io[2],io[3], \
- SUBKEY_L(i + 1),SUBKEY_R(i + 1), \
- SUBKEY_L(i + 0),SUBKEY_R(i + 0), \
- t0,t1,il,ir); \
-} while (0)
+#define ROUNDS(i) ({ \
+ CAMELLIA_ROUNDSM(io[0], io[1], \
+ SUBKEY_L(i + 7), SUBKEY_R(i + 7), \
+ io[2], io[3], il, ir); \
+ CAMELLIA_ROUNDSM(io[2], io[3], \
+ SUBKEY_L(i + 6), SUBKEY_R(i + 6), \
+ io[0], io[1], il, ir); \
+ CAMELLIA_ROUNDSM(io[0], io[1], \
+ SUBKEY_L(i + 5), SUBKEY_R(i + 5), \
+ io[2], io[3], il, ir); \
+ CAMELLIA_ROUNDSM(io[2], io[3], \
+ SUBKEY_L(i + 4), SUBKEY_R(i + 4), \
+ io[0], io[1], il, ir); \
+ CAMELLIA_ROUNDSM(io[0], io[1], \
+ SUBKEY_L(i + 3), SUBKEY_R(i + 3), \
+ io[2], io[3], il, ir); \
+ CAMELLIA_ROUNDSM(io[2], io[3], \
+ SUBKEY_L(i + 2), SUBKEY_R(i + 2), \
+ io[0], io[1], il, ir); \
+})
+#define FLS(i) ({ \
+ CAMELLIA_FLS(io[0], io[1], io[2], io[3], \
+ SUBKEY_L(i + 1), SUBKEY_R(i + 1), \
+ SUBKEY_L(i + 0), SUBKEY_R(i + 0), \
+ t0, t1, il, ir); \
+})
if (i == 32) {
ROUNDS(24);
@@ -1035,6 +1012,7 @@ static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm);
const __be32 *src = (const __be32 *)in;
__be32 *dst = (__be32 *)out;
+ unsigned int max;
u32 tmp[4];
@@ -1043,9 +1021,12 @@ static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
tmp[2] = be32_to_cpu(src[2]);
tmp[3] = be32_to_cpu(src[3]);
- camellia_do_encrypt(cctx->key_table, tmp,
- cctx->key_length == 16 ? 24 : 32 /* for key lengths of 24 and 32 */
- );
+ if (cctx->key_length == 16)
+ max = 24;
+ else
+ max = 32; /* for key lengths of 24 and 32 */
+
+ camellia_do_encrypt(cctx->key_table, tmp, max);
/* do_encrypt returns 0,1 swapped with 2,3 */
dst[0] = cpu_to_be32(tmp[2]);
@@ -1059,6 +1040,7 @@ static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm);
const __be32 *src = (const __be32 *)in;
__be32 *dst = (__be32 *)out;
+ unsigned int max;
u32 tmp[4];
@@ -1067,9 +1049,12 @@ static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
tmp[2] = be32_to_cpu(src[2]);
tmp[3] = be32_to_cpu(src[3]);
- camellia_do_decrypt(cctx->key_table, tmp,
- cctx->key_length == 16 ? 24 : 32 /* for key lengths of 24 and 32 */
- );
+ if (cctx->key_length == 16)
+ max = 24;
+ else
+ max = 32; /* for key lengths of 24 and 32 */
+
+ camellia_do_decrypt(cctx->key_table, tmp, max);
/* do_decrypt returns 0,1 swapped with 2,3 */
dst[0] = cpu_to_be32(tmp[2]);
@@ -1087,7 +1072,6 @@ static struct crypto_alg camellia_alg = {
.cra_ctxsize = sizeof(struct camellia_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(camellia_alg.cra_list),
.cra_u = {
.cipher = {
.cia_min_keysize = CAMELLIA_MIN_KEY_SIZE,
@@ -1114,3 +1098,4 @@ module_exit(camellia_fini);
MODULE_DESCRIPTION("Camellia Cipher Algorithm");
MODULE_LICENSE("GPL");
+MODULE_ALIAS("camellia");
diff --git a/crypto/cast5.c b/crypto/cast5_generic.c
index 8cbe28fa0e0..5558f630a0e 100644
--- a/crypto/cast5.c
+++ b/crypto/cast5_generic.c
@@ -4,8 +4,8 @@
* Derived from GnuPG implementation of cast5.
*
* Major Changes.
-* Complete conformance to rfc2144.
-* Supports key size from 40 to 128 bits.
+* Complete conformance to rfc2144.
+* Supports key size from 40 to 128 bits.
*
* Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
* Copyright (C) 2003 Kartikey Mahendra Bhatt <kartik_me@hotmail.com>.
@@ -28,282 +28,8 @@
#include <linux/errno.h>
#include <linux/string.h>
#include <linux/types.h>
+#include <crypto/cast5.h>
-#define CAST5_BLOCK_SIZE 8
-#define CAST5_MIN_KEY_SIZE 5
-#define CAST5_MAX_KEY_SIZE 16
-
-struct cast5_ctx {
- u32 Km[16];
- u8 Kr[16];
- int rr; /* rr?number of rounds = 16:number of rounds = 12; (rfc 2144) */
-};
-
-
-static const u32 s1[256] = {
- 0x30fb40d4, 0x9fa0ff0b, 0x6beccd2f, 0x3f258c7a, 0x1e213f2f,
- 0x9c004dd3, 0x6003e540, 0xcf9fc949,
- 0xbfd4af27, 0x88bbbdb5, 0xe2034090, 0x98d09675, 0x6e63a0e0,
- 0x15c361d2, 0xc2e7661d, 0x22d4ff8e,
- 0x28683b6f, 0xc07fd059, 0xff2379c8, 0x775f50e2, 0x43c340d3,
- 0xdf2f8656, 0x887ca41a, 0xa2d2bd2d,
- 0xa1c9e0d6, 0x346c4819, 0x61b76d87, 0x22540f2f, 0x2abe32e1,
- 0xaa54166b, 0x22568e3a, 0xa2d341d0,
- 0x66db40c8, 0xa784392f, 0x004dff2f, 0x2db9d2de, 0x97943fac,
- 0x4a97c1d8, 0x527644b7, 0xb5f437a7,
- 0xb82cbaef, 0xd751d159, 0x6ff7f0ed, 0x5a097a1f, 0x827b68d0,
- 0x90ecf52e, 0x22b0c054, 0xbc8e5935,
- 0x4b6d2f7f, 0x50bb64a2, 0xd2664910, 0xbee5812d, 0xb7332290,
- 0xe93b159f, 0xb48ee411, 0x4bff345d,
- 0xfd45c240, 0xad31973f, 0xc4f6d02e, 0x55fc8165, 0xd5b1caad,
- 0xa1ac2dae, 0xa2d4b76d, 0xc19b0c50,
- 0x882240f2, 0x0c6e4f38, 0xa4e4bfd7, 0x4f5ba272, 0x564c1d2f,
- 0xc59c5319, 0xb949e354, 0xb04669fe,
- 0xb1b6ab8a, 0xc71358dd, 0x6385c545, 0x110f935d, 0x57538ad5,
- 0x6a390493, 0xe63d37e0, 0x2a54f6b3,
- 0x3a787d5f, 0x6276a0b5, 0x19a6fcdf, 0x7a42206a, 0x29f9d4d5,
- 0xf61b1891, 0xbb72275e, 0xaa508167,
- 0x38901091, 0xc6b505eb, 0x84c7cb8c, 0x2ad75a0f, 0x874a1427,
- 0xa2d1936b, 0x2ad286af, 0xaa56d291,
- 0xd7894360, 0x425c750d, 0x93b39e26, 0x187184c9, 0x6c00b32d,
- 0x73e2bb14, 0xa0bebc3c, 0x54623779,
- 0x64459eab, 0x3f328b82, 0x7718cf82, 0x59a2cea6, 0x04ee002e,
- 0x89fe78e6, 0x3fab0950, 0x325ff6c2,
- 0x81383f05, 0x6963c5c8, 0x76cb5ad6, 0xd49974c9, 0xca180dcf,
- 0x380782d5, 0xc7fa5cf6, 0x8ac31511,
- 0x35e79e13, 0x47da91d0, 0xf40f9086, 0xa7e2419e, 0x31366241,
- 0x051ef495, 0xaa573b04, 0x4a805d8d,
- 0x548300d0, 0x00322a3c, 0xbf64cddf, 0xba57a68e, 0x75c6372b,
- 0x50afd341, 0xa7c13275, 0x915a0bf5,
- 0x6b54bfab, 0x2b0b1426, 0xab4cc9d7, 0x449ccd82, 0xf7fbf265,
- 0xab85c5f3, 0x1b55db94, 0xaad4e324,
- 0xcfa4bd3f, 0x2deaa3e2, 0x9e204d02, 0xc8bd25ac, 0xeadf55b3,
- 0xd5bd9e98, 0xe31231b2, 0x2ad5ad6c,
- 0x954329de, 0xadbe4528, 0xd8710f69, 0xaa51c90f, 0xaa786bf6,
- 0x22513f1e, 0xaa51a79b, 0x2ad344cc,
- 0x7b5a41f0, 0xd37cfbad, 0x1b069505, 0x41ece491, 0xb4c332e6,
- 0x032268d4, 0xc9600acc, 0xce387e6d,
- 0xbf6bb16c, 0x6a70fb78, 0x0d03d9c9, 0xd4df39de, 0xe01063da,
- 0x4736f464, 0x5ad328d8, 0xb347cc96,
- 0x75bb0fc3, 0x98511bfb, 0x4ffbcc35, 0xb58bcf6a, 0xe11f0abc,
- 0xbfc5fe4a, 0xa70aec10, 0xac39570a,
- 0x3f04442f, 0x6188b153, 0xe0397a2e, 0x5727cb79, 0x9ceb418f,
- 0x1cacd68d, 0x2ad37c96, 0x0175cb9d,
- 0xc69dff09, 0xc75b65f0, 0xd9db40d8, 0xec0e7779, 0x4744ead4,
- 0xb11c3274, 0xdd24cb9e, 0x7e1c54bd,
- 0xf01144f9, 0xd2240eb1, 0x9675b3fd, 0xa3ac3755, 0xd47c27af,
- 0x51c85f4d, 0x56907596, 0xa5bb15e6,
- 0x580304f0, 0xca042cf1, 0x011a37ea, 0x8dbfaadb, 0x35ba3e4a,
- 0x3526ffa0, 0xc37b4d09, 0xbc306ed9,
- 0x98a52666, 0x5648f725, 0xff5e569d, 0x0ced63d0, 0x7c63b2cf,
- 0x700b45e1, 0xd5ea50f1, 0x85a92872,
- 0xaf1fbda7, 0xd4234870, 0xa7870bf3, 0x2d3b4d79, 0x42e04198,
- 0x0cd0ede7, 0x26470db8, 0xf881814c,
- 0x474d6ad7, 0x7c0c5e5c, 0xd1231959, 0x381b7298, 0xf5d2f4db,
- 0xab838653, 0x6e2f1e23, 0x83719c9e,
- 0xbd91e046, 0x9a56456e, 0xdc39200c, 0x20c8c571, 0x962bda1c,
- 0xe1e696ff, 0xb141ab08, 0x7cca89b9,
- 0x1a69e783, 0x02cc4843, 0xa2f7c579, 0x429ef47d, 0x427b169c,
- 0x5ac9f049, 0xdd8f0f00, 0x5c8165bf
-};
-static const u32 s2[256] = {
- 0x1f201094, 0xef0ba75b, 0x69e3cf7e, 0x393f4380, 0xfe61cf7a,
- 0xeec5207a, 0x55889c94, 0x72fc0651,
- 0xada7ef79, 0x4e1d7235, 0xd55a63ce, 0xde0436ba, 0x99c430ef,
- 0x5f0c0794, 0x18dcdb7d, 0xa1d6eff3,
- 0xa0b52f7b, 0x59e83605, 0xee15b094, 0xe9ffd909, 0xdc440086,
- 0xef944459, 0xba83ccb3, 0xe0c3cdfb,
- 0xd1da4181, 0x3b092ab1, 0xf997f1c1, 0xa5e6cf7b, 0x01420ddb,
- 0xe4e7ef5b, 0x25a1ff41, 0xe180f806,
- 0x1fc41080, 0x179bee7a, 0xd37ac6a9, 0xfe5830a4, 0x98de8b7f,
- 0x77e83f4e, 0x79929269, 0x24fa9f7b,
- 0xe113c85b, 0xacc40083, 0xd7503525, 0xf7ea615f, 0x62143154,
- 0x0d554b63, 0x5d681121, 0xc866c359,
- 0x3d63cf73, 0xcee234c0, 0xd4d87e87, 0x5c672b21, 0x071f6181,
- 0x39f7627f, 0x361e3084, 0xe4eb573b,
- 0x602f64a4, 0xd63acd9c, 0x1bbc4635, 0x9e81032d, 0x2701f50c,
- 0x99847ab4, 0xa0e3df79, 0xba6cf38c,
- 0x10843094, 0x2537a95e, 0xf46f6ffe, 0xa1ff3b1f, 0x208cfb6a,
- 0x8f458c74, 0xd9e0a227, 0x4ec73a34,
- 0xfc884f69, 0x3e4de8df, 0xef0e0088, 0x3559648d, 0x8a45388c,
- 0x1d804366, 0x721d9bfd, 0xa58684bb,
- 0xe8256333, 0x844e8212, 0x128d8098, 0xfed33fb4, 0xce280ae1,
- 0x27e19ba5, 0xd5a6c252, 0xe49754bd,
- 0xc5d655dd, 0xeb667064, 0x77840b4d, 0xa1b6a801, 0x84db26a9,
- 0xe0b56714, 0x21f043b7, 0xe5d05860,
- 0x54f03084, 0x066ff472, 0xa31aa153, 0xdadc4755, 0xb5625dbf,
- 0x68561be6, 0x83ca6b94, 0x2d6ed23b,
- 0xeccf01db, 0xa6d3d0ba, 0xb6803d5c, 0xaf77a709, 0x33b4a34c,
- 0x397bc8d6, 0x5ee22b95, 0x5f0e5304,
- 0x81ed6f61, 0x20e74364, 0xb45e1378, 0xde18639b, 0x881ca122,
- 0xb96726d1, 0x8049a7e8, 0x22b7da7b,
- 0x5e552d25, 0x5272d237, 0x79d2951c, 0xc60d894c, 0x488cb402,
- 0x1ba4fe5b, 0xa4b09f6b, 0x1ca815cf,
- 0xa20c3005, 0x8871df63, 0xb9de2fcb, 0x0cc6c9e9, 0x0beeff53,
- 0xe3214517, 0xb4542835, 0x9f63293c,
- 0xee41e729, 0x6e1d2d7c, 0x50045286, 0x1e6685f3, 0xf33401c6,
- 0x30a22c95, 0x31a70850, 0x60930f13,
- 0x73f98417, 0xa1269859, 0xec645c44, 0x52c877a9, 0xcdff33a6,
- 0xa02b1741, 0x7cbad9a2, 0x2180036f,
- 0x50d99c08, 0xcb3f4861, 0xc26bd765, 0x64a3f6ab, 0x80342676,
- 0x25a75e7b, 0xe4e6d1fc, 0x20c710e6,
- 0xcdf0b680, 0x17844d3b, 0x31eef84d, 0x7e0824e4, 0x2ccb49eb,
- 0x846a3bae, 0x8ff77888, 0xee5d60f6,
- 0x7af75673, 0x2fdd5cdb, 0xa11631c1, 0x30f66f43, 0xb3faec54,
- 0x157fd7fa, 0xef8579cc, 0xd152de58,
- 0xdb2ffd5e, 0x8f32ce19, 0x306af97a, 0x02f03ef8, 0x99319ad5,
- 0xc242fa0f, 0xa7e3ebb0, 0xc68e4906,
- 0xb8da230c, 0x80823028, 0xdcdef3c8, 0xd35fb171, 0x088a1bc8,
- 0xbec0c560, 0x61a3c9e8, 0xbca8f54d,
- 0xc72feffa, 0x22822e99, 0x82c570b4, 0xd8d94e89, 0x8b1c34bc,
- 0x301e16e6, 0x273be979, 0xb0ffeaa6,
- 0x61d9b8c6, 0x00b24869, 0xb7ffce3f, 0x08dc283b, 0x43daf65a,
- 0xf7e19798, 0x7619b72f, 0x8f1c9ba4,
- 0xdc8637a0, 0x16a7d3b1, 0x9fc393b7, 0xa7136eeb, 0xc6bcc63e,
- 0x1a513742, 0xef6828bc, 0x520365d6,
- 0x2d6a77ab, 0x3527ed4b, 0x821fd216, 0x095c6e2e, 0xdb92f2fb,
- 0x5eea29cb, 0x145892f5, 0x91584f7f,
- 0x5483697b, 0x2667a8cc, 0x85196048, 0x8c4bacea, 0x833860d4,
- 0x0d23e0f9, 0x6c387e8a, 0x0ae6d249,
- 0xb284600c, 0xd835731d, 0xdcb1c647, 0xac4c56ea, 0x3ebd81b3,
- 0x230eabb0, 0x6438bc87, 0xf0b5b1fa,
- 0x8f5ea2b3, 0xfc184642, 0x0a036b7a, 0x4fb089bd, 0x649da589,
- 0xa345415e, 0x5c038323, 0x3e5d3bb9,
- 0x43d79572, 0x7e6dd07c, 0x06dfdf1e, 0x6c6cc4ef, 0x7160a539,
- 0x73bfbe70, 0x83877605, 0x4523ecf1
-};
-static const u32 s3[256] = {
- 0x8defc240, 0x25fa5d9f, 0xeb903dbf, 0xe810c907, 0x47607fff,
- 0x369fe44b, 0x8c1fc644, 0xaececa90,
- 0xbeb1f9bf, 0xeefbcaea, 0xe8cf1950, 0x51df07ae, 0x920e8806,
- 0xf0ad0548, 0xe13c8d83, 0x927010d5,
- 0x11107d9f, 0x07647db9, 0xb2e3e4d4, 0x3d4f285e, 0xb9afa820,
- 0xfade82e0, 0xa067268b, 0x8272792e,
- 0x553fb2c0, 0x489ae22b, 0xd4ef9794, 0x125e3fbc, 0x21fffcee,
- 0x825b1bfd, 0x9255c5ed, 0x1257a240,
- 0x4e1a8302, 0xbae07fff, 0x528246e7, 0x8e57140e, 0x3373f7bf,
- 0x8c9f8188, 0xa6fc4ee8, 0xc982b5a5,
- 0xa8c01db7, 0x579fc264, 0x67094f31, 0xf2bd3f5f, 0x40fff7c1,
- 0x1fb78dfc, 0x8e6bd2c1, 0x437be59b,
- 0x99b03dbf, 0xb5dbc64b, 0x638dc0e6, 0x55819d99, 0xa197c81c,
- 0x4a012d6e, 0xc5884a28, 0xccc36f71,
- 0xb843c213, 0x6c0743f1, 0x8309893c, 0x0feddd5f, 0x2f7fe850,
- 0xd7c07f7e, 0x02507fbf, 0x5afb9a04,
- 0xa747d2d0, 0x1651192e, 0xaf70bf3e, 0x58c31380, 0x5f98302e,
- 0x727cc3c4, 0x0a0fb402, 0x0f7fef82,
- 0x8c96fdad, 0x5d2c2aae, 0x8ee99a49, 0x50da88b8, 0x8427f4a0,
- 0x1eac5790, 0x796fb449, 0x8252dc15,
- 0xefbd7d9b, 0xa672597d, 0xada840d8, 0x45f54504, 0xfa5d7403,
- 0xe83ec305, 0x4f91751a, 0x925669c2,
- 0x23efe941, 0xa903f12e, 0x60270df2, 0x0276e4b6, 0x94fd6574,
- 0x927985b2, 0x8276dbcb, 0x02778176,
- 0xf8af918d, 0x4e48f79e, 0x8f616ddf, 0xe29d840e, 0x842f7d83,
- 0x340ce5c8, 0x96bbb682, 0x93b4b148,
- 0xef303cab, 0x984faf28, 0x779faf9b, 0x92dc560d, 0x224d1e20,
- 0x8437aa88, 0x7d29dc96, 0x2756d3dc,
- 0x8b907cee, 0xb51fd240, 0xe7c07ce3, 0xe566b4a1, 0xc3e9615e,
- 0x3cf8209d, 0x6094d1e3, 0xcd9ca341,
- 0x5c76460e, 0x00ea983b, 0xd4d67881, 0xfd47572c, 0xf76cedd9,
- 0xbda8229c, 0x127dadaa, 0x438a074e,
- 0x1f97c090, 0x081bdb8a, 0x93a07ebe, 0xb938ca15, 0x97b03cff,
- 0x3dc2c0f8, 0x8d1ab2ec, 0x64380e51,
- 0x68cc7bfb, 0xd90f2788, 0x12490181, 0x5de5ffd4, 0xdd7ef86a,
- 0x76a2e214, 0xb9a40368, 0x925d958f,
- 0x4b39fffa, 0xba39aee9, 0xa4ffd30b, 0xfaf7933b, 0x6d498623,
- 0x193cbcfa, 0x27627545, 0x825cf47a,
- 0x61bd8ba0, 0xd11e42d1, 0xcead04f4, 0x127ea392, 0x10428db7,
- 0x8272a972, 0x9270c4a8, 0x127de50b,
- 0x285ba1c8, 0x3c62f44f, 0x35c0eaa5, 0xe805d231, 0x428929fb,
- 0xb4fcdf82, 0x4fb66a53, 0x0e7dc15b,
- 0x1f081fab, 0x108618ae, 0xfcfd086d, 0xf9ff2889, 0x694bcc11,
- 0x236a5cae, 0x12deca4d, 0x2c3f8cc5,
- 0xd2d02dfe, 0xf8ef5896, 0xe4cf52da, 0x95155b67, 0x494a488c,
- 0xb9b6a80c, 0x5c8f82bc, 0x89d36b45,
- 0x3a609437, 0xec00c9a9, 0x44715253, 0x0a874b49, 0xd773bc40,
- 0x7c34671c, 0x02717ef6, 0x4feb5536,
- 0xa2d02fff, 0xd2bf60c4, 0xd43f03c0, 0x50b4ef6d, 0x07478cd1,
- 0x006e1888, 0xa2e53f55, 0xb9e6d4bc,
- 0xa2048016, 0x97573833, 0xd7207d67, 0xde0f8f3d, 0x72f87b33,
- 0xabcc4f33, 0x7688c55d, 0x7b00a6b0,
- 0x947b0001, 0x570075d2, 0xf9bb88f8, 0x8942019e, 0x4264a5ff,
- 0x856302e0, 0x72dbd92b, 0xee971b69,
- 0x6ea22fde, 0x5f08ae2b, 0xaf7a616d, 0xe5c98767, 0xcf1febd2,
- 0x61efc8c2, 0xf1ac2571, 0xcc8239c2,
- 0x67214cb8, 0xb1e583d1, 0xb7dc3e62, 0x7f10bdce, 0xf90a5c38,
- 0x0ff0443d, 0x606e6dc6, 0x60543a49,
- 0x5727c148, 0x2be98a1d, 0x8ab41738, 0x20e1be24, 0xaf96da0f,
- 0x68458425, 0x99833be5, 0x600d457d,
- 0x282f9350, 0x8334b362, 0xd91d1120, 0x2b6d8da0, 0x642b1e31,
- 0x9c305a00, 0x52bce688, 0x1b03588a,
- 0xf7baefd5, 0x4142ed9c, 0xa4315c11, 0x83323ec5, 0xdfef4636,
- 0xa133c501, 0xe9d3531c, 0xee353783
-};
-static const u32 s4[256] = {
- 0x9db30420, 0x1fb6e9de, 0xa7be7bef, 0xd273a298, 0x4a4f7bdb,
- 0x64ad8c57, 0x85510443, 0xfa020ed1,
- 0x7e287aff, 0xe60fb663, 0x095f35a1, 0x79ebf120, 0xfd059d43,
- 0x6497b7b1, 0xf3641f63, 0x241e4adf,
- 0x28147f5f, 0x4fa2b8cd, 0xc9430040, 0x0cc32220, 0xfdd30b30,
- 0xc0a5374f, 0x1d2d00d9, 0x24147b15,
- 0xee4d111a, 0x0fca5167, 0x71ff904c, 0x2d195ffe, 0x1a05645f,
- 0x0c13fefe, 0x081b08ca, 0x05170121,
- 0x80530100, 0xe83e5efe, 0xac9af4f8, 0x7fe72701, 0xd2b8ee5f,
- 0x06df4261, 0xbb9e9b8a, 0x7293ea25,
- 0xce84ffdf, 0xf5718801, 0x3dd64b04, 0xa26f263b, 0x7ed48400,
- 0x547eebe6, 0x446d4ca0, 0x6cf3d6f5,
- 0x2649abdf, 0xaea0c7f5, 0x36338cc1, 0x503f7e93, 0xd3772061,
- 0x11b638e1, 0x72500e03, 0xf80eb2bb,
- 0xabe0502e, 0xec8d77de, 0x57971e81, 0xe14f6746, 0xc9335400,
- 0x6920318f, 0x081dbb99, 0xffc304a5,
- 0x4d351805, 0x7f3d5ce3, 0xa6c866c6, 0x5d5bcca9, 0xdaec6fea,
- 0x9f926f91, 0x9f46222f, 0x3991467d,
- 0xa5bf6d8e, 0x1143c44f, 0x43958302, 0xd0214eeb, 0x022083b8,
- 0x3fb6180c, 0x18f8931e, 0x281658e6,
- 0x26486e3e, 0x8bd78a70, 0x7477e4c1, 0xb506e07c, 0xf32d0a25,
- 0x79098b02, 0xe4eabb81, 0x28123b23,
- 0x69dead38, 0x1574ca16, 0xdf871b62, 0x211c40b7, 0xa51a9ef9,
- 0x0014377b, 0x041e8ac8, 0x09114003,
- 0xbd59e4d2, 0xe3d156d5, 0x4fe876d5, 0x2f91a340, 0x557be8de,
- 0x00eae4a7, 0x0ce5c2ec, 0x4db4bba6,
- 0xe756bdff, 0xdd3369ac, 0xec17b035, 0x06572327, 0x99afc8b0,
- 0x56c8c391, 0x6b65811c, 0x5e146119,
- 0x6e85cb75, 0xbe07c002, 0xc2325577, 0x893ff4ec, 0x5bbfc92d,
- 0xd0ec3b25, 0xb7801ab7, 0x8d6d3b24,
- 0x20c763ef, 0xc366a5fc, 0x9c382880, 0x0ace3205, 0xaac9548a,
- 0xeca1d7c7, 0x041afa32, 0x1d16625a,
- 0x6701902c, 0x9b757a54, 0x31d477f7, 0x9126b031, 0x36cc6fdb,
- 0xc70b8b46, 0xd9e66a48, 0x56e55a79,
- 0x026a4ceb, 0x52437eff, 0x2f8f76b4, 0x0df980a5, 0x8674cde3,
- 0xedda04eb, 0x17a9be04, 0x2c18f4df,
- 0xb7747f9d, 0xab2af7b4, 0xefc34d20, 0x2e096b7c, 0x1741a254,
- 0xe5b6a035, 0x213d42f6, 0x2c1c7c26,
- 0x61c2f50f, 0x6552daf9, 0xd2c231f8, 0x25130f69, 0xd8167fa2,
- 0x0418f2c8, 0x001a96a6, 0x0d1526ab,
- 0x63315c21, 0x5e0a72ec, 0x49bafefd, 0x187908d9, 0x8d0dbd86,
- 0x311170a7, 0x3e9b640c, 0xcc3e10d7,
- 0xd5cad3b6, 0x0caec388, 0xf73001e1, 0x6c728aff, 0x71eae2a1,
- 0x1f9af36e, 0xcfcbd12f, 0xc1de8417,
- 0xac07be6b, 0xcb44a1d8, 0x8b9b0f56, 0x013988c3, 0xb1c52fca,
- 0xb4be31cd, 0xd8782806, 0x12a3a4e2,
- 0x6f7de532, 0x58fd7eb6, 0xd01ee900, 0x24adffc2, 0xf4990fc5,
- 0x9711aac5, 0x001d7b95, 0x82e5e7d2,
- 0x109873f6, 0x00613096, 0xc32d9521, 0xada121ff, 0x29908415,
- 0x7fbb977f, 0xaf9eb3db, 0x29c9ed2a,
- 0x5ce2a465, 0xa730f32c, 0xd0aa3fe8, 0x8a5cc091, 0xd49e2ce7,
- 0x0ce454a9, 0xd60acd86, 0x015f1919,
- 0x77079103, 0xdea03af6, 0x78a8565e, 0xdee356df, 0x21f05cbe,
- 0x8b75e387, 0xb3c50651, 0xb8a5c3ef,
- 0xd8eeb6d2, 0xe523be77, 0xc2154529, 0x2f69efdf, 0xafe67afb,
- 0xf470c4b2, 0xf3e0eb5b, 0xd6cc9876,
- 0x39e4460c, 0x1fda8538, 0x1987832f, 0xca007367, 0xa99144f8,
- 0x296b299e, 0x492fc295, 0x9266beab,
- 0xb5676e69, 0x9bd3ddda, 0xdf7e052f, 0xdb25701c, 0x1b5e51ee,
- 0xf65324e6, 0x6afce36c, 0x0316cc04,
- 0x8644213e, 0xb7dc59d0, 0x7965291f, 0xccd6fd43, 0x41823979,
- 0x932bcdf6, 0xb657c34d, 0x4edfd282,
- 0x7ae5290c, 0x3cb9536b, 0x851e20fe, 0x9833557e, 0x13ecf0b0,
- 0xd3ffb372, 0x3f85c5c1, 0x0aef7ed2
-};
static const u32 s5[256] = {
0x7ec90c04, 0x2c6e74b9, 0x9b0e66df, 0xa6337911, 0xb86a7fff,
0x1dd358f5, 0x44dd9d44, 0x1731167f,
@@ -569,17 +295,21 @@ static const u32 sb8[256] = {
0xeaee6801, 0x8db2a283, 0xea8bf59e
};
-#define F1(D,m,r) ( (I = ((m) + (D))), (I=rol32(I,(r))), \
- (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff]) )
-#define F2(D,m,r) ( (I = ((m) ^ (D))), (I=rol32(I,(r))), \
- (((s1[I >> 24] - s2[(I>>16)&0xff]) + s3[(I>>8)&0xff]) ^ s4[I&0xff]) )
-#define F3(D,m,r) ( (I = ((m) - (D))), (I=rol32(I,(r))), \
- (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff]) )
+#define s1 cast_s1
+#define s2 cast_s2
+#define s3 cast_s3
+#define s4 cast_s4
+#define F1(D, m, r) ((I = ((m) + (D))), (I = rol32(I, (r))), \
+ (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff]))
+#define F2(D, m, r) ((I = ((m) ^ (D))), (I = rol32(I, (r))), \
+ (((s1[I >> 24] - s2[(I>>16)&0xff]) + s3[(I>>8)&0xff]) ^ s4[I&0xff]))
+#define F3(D, m, r) ((I = ((m) - (D))), (I = rol32(I, (r))), \
+ (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff]))
-static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
+
+void __cast5_encrypt(struct cast5_ctx *c, u8 *outbuf, const u8 *inbuf)
{
- struct cast5_ctx *c = crypto_tfm_ctx(tfm);
const __be32 *src = (const __be32 *)inbuf;
__be32 *dst = (__be32 *)outbuf;
u32 l, r, t;
@@ -604,36 +334,23 @@ static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
* Rounds 3, 6, 9, 12, and 15 use f function Type 3.
*/
+ t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]);
+ t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]);
+ t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]);
+ t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]);
+ t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]);
+ t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]);
+ t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]);
+ t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]);
+ t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]);
+ t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]);
+ t = l; l = r; r = t ^ F2(r, Km[10], Kr[10]);
+ t = l; l = r; r = t ^ F3(r, Km[11], Kr[11]);
if (!(c->rr)) {
- t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]);
- t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]);
- t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]);
- t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]);
- t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]);
- t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]);
- t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]);
- t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]);
- t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]);
- t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]);
- t = l; l = r; r = t ^ F2(r, Km[10], Kr[10]);
- t = l; l = r; r = t ^ F3(r, Km[11], Kr[11]);
t = l; l = r; r = t ^ F1(r, Km[12], Kr[12]);
t = l; l = r; r = t ^ F2(r, Km[13], Kr[13]);
t = l; l = r; r = t ^ F3(r, Km[14], Kr[14]);
t = l; l = r; r = t ^ F1(r, Km[15], Kr[15]);
- } else {
- t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]);
- t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]);
- t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]);
- t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]);
- t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]);
- t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]);
- t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]);
- t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]);
- t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]);
- t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]);
- t = l; l = r; r = t ^ F2(r, Km[10], Kr[10]);
- t = l; l = r; r = t ^ F3(r, Km[11], Kr[11]);
}
/* c1...c64 <-- (R16,L16). (Exchange final blocks L16, R16 and
@@ -641,10 +358,15 @@ static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
dst[0] = cpu_to_be32(r);
dst[1] = cpu_to_be32(l);
}
+EXPORT_SYMBOL_GPL(__cast5_encrypt);
-static void cast5_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
+static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
+{
+ __cast5_encrypt(crypto_tfm_ctx(tfm), outbuf, inbuf);
+}
+
+void __cast5_decrypt(struct cast5_ctx *c, u8 *outbuf, const u8 *inbuf)
{
- struct cast5_ctx *c = crypto_tfm_ctx(tfm);
const __be32 *src = (const __be32 *)inbuf;
__be32 *dst = (__be32 *)outbuf;
u32 l, r, t;
@@ -663,38 +385,31 @@ static void cast5_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
t = l; l = r; r = t ^ F3(r, Km[14], Kr[14]);
t = l; l = r; r = t ^ F2(r, Km[13], Kr[13]);
t = l; l = r; r = t ^ F1(r, Km[12], Kr[12]);
- t = l; l = r; r = t ^ F3(r, Km[11], Kr[11]);
- t = l; l = r; r = t ^ F2(r, Km[10], Kr[10]);
- t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]);
- t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]);
- t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]);
- t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]);
- t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]);
- t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]);
- t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]);
- t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]);
- t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]);
- t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]);
- } else {
- t = l; l = r; r = t ^ F3(r, Km[11], Kr[11]);
- t = l; l = r; r = t ^ F2(r, Km[10], Kr[10]);
- t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]);
- t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]);
- t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]);
- t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]);
- t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]);
- t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]);
- t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]);
- t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]);
- t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]);
- t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]);
}
+ t = l; l = r; r = t ^ F3(r, Km[11], Kr[11]);
+ t = l; l = r; r = t ^ F2(r, Km[10], Kr[10]);
+ t = l; l = r; r = t ^ F1(r, Km[9], Kr[9]);
+ t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]);
+ t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]);
+ t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]);
+ t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]);
+ t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]);
+ t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]);
+ t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]);
+ t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]);
+ t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]);
dst[0] = cpu_to_be32(r);
dst[1] = cpu_to_be32(l);
}
+EXPORT_SYMBOL_GPL(__cast5_decrypt);
-static void key_schedule(u32 * x, u32 * z, u32 * k)
+static void cast5_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
+{
+ __cast5_decrypt(crypto_tfm_ctx(tfm), outbuf, inbuf);
+}
+
+static void key_schedule(u32 *x, u32 *z, u32 *k)
{
#define xi(i) ((x[(i)/4] >> (8*(3-((i)%4)))) & 0xff)
@@ -769,7 +484,7 @@ static void key_schedule(u32 * x, u32 * z, u32 * k)
}
-static int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned key_len)
+int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int key_len)
{
struct cast5_ctx *c = crypto_tfm_ctx(tfm);
int i;
@@ -797,20 +512,22 @@ static int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned key_len)
c->Kr[i] = k[i] & 0x1f;
return 0;
}
+EXPORT_SYMBOL_GPL(cast5_setkey);
static struct crypto_alg alg = {
- .cra_name = "cast5",
- .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
- .cra_blocksize = CAST5_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct cast5_ctx),
- .cra_alignmask = 3,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(alg.cra_list),
- .cra_u = {
+ .cra_name = "cast5",
+ .cra_driver_name = "cast5-generic",
+ .cra_priority = 100,
+ .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
+ .cra_blocksize = CAST5_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct cast5_ctx),
+ .cra_alignmask = 3,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
.cipher = {
.cia_min_keysize = CAST5_MIN_KEY_SIZE,
.cia_max_keysize = CAST5_MAX_KEY_SIZE,
- .cia_setkey = cast5_setkey,
+ .cia_setkey = cast5_setkey,
.cia_encrypt = cast5_encrypt,
.cia_decrypt = cast5_decrypt
}
@@ -832,4 +549,4 @@ module_exit(cast5_mod_fini);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cast5 Cipher Algorithm");
-
+MODULE_ALIAS("cast5");
diff --git a/crypto/cast6_generic.c b/crypto/cast6_generic.c
new file mode 100644
index 00000000000..de732528a43
--- /dev/null
+++ b/crypto/cast6_generic.c
@@ -0,0 +1,294 @@
+/* Kernel cryptographic api.
+ * cast6.c - Cast6 cipher algorithm [rfc2612].
+ *
+ * CAST-256 (*cast6*) is a DES like Substitution-Permutation Network (SPN)
+ * cryptosystem built upon the CAST-128 (*cast5*) [rfc2144] encryption
+ * algorithm.
+ *
+ * Copyright (C) 2003 Kartikey Mahendra Bhatt <kartik_me@hotmail.com>.
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA
+ */
+
+
+#include <asm/byteorder.h>
+#include <linux/init.h>
+#include <linux/crypto.h>
+#include <linux/module.h>
+#include <linux/errno.h>
+#include <linux/string.h>
+#include <linux/types.h>
+#include <crypto/cast6.h>
+
+#define s1 cast_s1
+#define s2 cast_s2
+#define s3 cast_s3
+#define s4 cast_s4
+
+#define F1(D, r, m) ((I = ((m) + (D))), (I = rol32(I, (r))), \
+ (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff]))
+#define F2(D, r, m) ((I = ((m) ^ (D))), (I = rol32(I, (r))), \
+ (((s1[I >> 24] - s2[(I>>16)&0xff]) + s3[(I>>8)&0xff]) ^ s4[I&0xff]))
+#define F3(D, r, m) ((I = ((m) - (D))), (I = rol32(I, (r))), \
+ (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff]))
+
+static const u32 Tm[24][8] = {
+ { 0x5a827999, 0xc95c653a, 0x383650db, 0xa7103c7c, 0x15ea281d,
+ 0x84c413be, 0xf39dff5f, 0x6277eb00 } ,
+ { 0xd151d6a1, 0x402bc242, 0xaf05ade3, 0x1ddf9984, 0x8cb98525,
+ 0xfb9370c6, 0x6a6d5c67, 0xd9474808 } ,
+ { 0x482133a9, 0xb6fb1f4a, 0x25d50aeb, 0x94aef68c, 0x0388e22d,
+ 0x7262cdce, 0xe13cb96f, 0x5016a510 } ,
+ { 0xbef090b1, 0x2dca7c52, 0x9ca467f3, 0x0b7e5394, 0x7a583f35,
+ 0xe9322ad6, 0x580c1677, 0xc6e60218 } ,
+ { 0x35bfedb9, 0xa499d95a, 0x1373c4fb, 0x824db09c, 0xf1279c3d,
+ 0x600187de, 0xcedb737f, 0x3db55f20 } ,
+ { 0xac8f4ac1, 0x1b693662, 0x8a432203, 0xf91d0da4, 0x67f6f945,
+ 0xd6d0e4e6, 0x45aad087, 0xb484bc28 } ,
+ { 0x235ea7c9, 0x9238936a, 0x01127f0b, 0x6fec6aac, 0xdec6564d,
+ 0x4da041ee, 0xbc7a2d8f, 0x2b541930 } ,
+ { 0x9a2e04d1, 0x0907f072, 0x77e1dc13, 0xe6bbc7b4, 0x5595b355,
+ 0xc46f9ef6, 0x33498a97, 0xa2237638 } ,
+ { 0x10fd61d9, 0x7fd74d7a, 0xeeb1391b, 0x5d8b24bc, 0xcc65105d,
+ 0x3b3efbfe, 0xaa18e79f, 0x18f2d340 } ,
+ { 0x87ccbee1, 0xf6a6aa82, 0x65809623, 0xd45a81c4, 0x43346d65,
+ 0xb20e5906, 0x20e844a7, 0x8fc23048 } ,
+ { 0xfe9c1be9, 0x6d76078a, 0xdc4ff32b, 0x4b29decc, 0xba03ca6d,
+ 0x28ddb60e, 0x97b7a1af, 0x06918d50 } ,
+ { 0x756b78f1, 0xe4456492, 0x531f5033, 0xc1f93bd4, 0x30d32775,
+ 0x9fad1316, 0x0e86feb7, 0x7d60ea58 } ,
+ { 0xec3ad5f9, 0x5b14c19a, 0xc9eead3b, 0x38c898dc, 0xa7a2847d,
+ 0x167c701e, 0x85565bbf, 0xf4304760 } ,
+ { 0x630a3301, 0xd1e41ea2, 0x40be0a43, 0xaf97f5e4, 0x1e71e185,
+ 0x8d4bcd26, 0xfc25b8c7, 0x6affa468 } ,
+ { 0xd9d99009, 0x48b37baa, 0xb78d674b, 0x266752ec, 0x95413e8d,
+ 0x041b2a2e, 0x72f515cf, 0xe1cf0170 } ,
+ { 0x50a8ed11, 0xbf82d8b2, 0x2e5cc453, 0x9d36aff4, 0x0c109b95,
+ 0x7aea8736, 0xe9c472d7, 0x589e5e78 } ,
+ { 0xc7784a19, 0x365235ba, 0xa52c215b, 0x14060cfc, 0x82dff89d,
+ 0xf1b9e43e, 0x6093cfdf, 0xcf6dbb80 } ,
+ { 0x3e47a721, 0xad2192c2, 0x1bfb7e63, 0x8ad56a04, 0xf9af55a5,
+ 0x68894146, 0xd7632ce7, 0x463d1888 } ,
+ { 0xb5170429, 0x23f0efca, 0x92cadb6b, 0x01a4c70c, 0x707eb2ad,
+ 0xdf589e4e, 0x4e3289ef, 0xbd0c7590 } ,
+ { 0x2be66131, 0x9ac04cd2, 0x099a3873, 0x78742414, 0xe74e0fb5,
+ 0x5627fb56, 0xc501e6f7, 0x33dbd298 } ,
+ { 0xa2b5be39, 0x118fa9da, 0x8069957b, 0xef43811c, 0x5e1d6cbd,
+ 0xccf7585e, 0x3bd143ff, 0xaaab2fa0 } ,
+ { 0x19851b41, 0x885f06e2, 0xf738f283, 0x6612de24, 0xd4ecc9c5,
+ 0x43c6b566, 0xb2a0a107, 0x217a8ca8 } ,
+ { 0x90547849, 0xff2e63ea, 0x6e084f8b, 0xdce23b2c, 0x4bbc26cd,
+ 0xba96126e, 0x296ffe0f, 0x9849e9b0 } ,
+ { 0x0723d551, 0x75fdc0f2, 0xe4d7ac93, 0x53b19834, 0xc28b83d5,
+ 0x31656f76, 0xa03f5b17, 0x0f1946b8 }
+};
+
+static const u8 Tr[4][8] = {
+ { 0x13, 0x04, 0x15, 0x06, 0x17, 0x08, 0x19, 0x0a } ,
+ { 0x1b, 0x0c, 0x1d, 0x0e, 0x1f, 0x10, 0x01, 0x12 } ,
+ { 0x03, 0x14, 0x05, 0x16, 0x07, 0x18, 0x09, 0x1a } ,
+ { 0x0b, 0x1c, 0x0d, 0x1e, 0x0f, 0x00, 0x11, 0x02 }
+};
+
+/* forward octave */
+static inline void W(u32 *key, unsigned int i)
+{
+ u32 I;
+ key[6] ^= F1(key[7], Tr[i % 4][0], Tm[i][0]);
+ key[5] ^= F2(key[6], Tr[i % 4][1], Tm[i][1]);
+ key[4] ^= F3(key[5], Tr[i % 4][2], Tm[i][2]);
+ key[3] ^= F1(key[4], Tr[i % 4][3], Tm[i][3]);
+ key[2] ^= F2(key[3], Tr[i % 4][4], Tm[i][4]);
+ key[1] ^= F3(key[2], Tr[i % 4][5], Tm[i][5]);
+ key[0] ^= F1(key[1], Tr[i % 4][6], Tm[i][6]);
+ key[7] ^= F2(key[0], Tr[i % 4][7], Tm[i][7]);
+}
+
+int __cast6_setkey(struct cast6_ctx *c, const u8 *in_key,
+ unsigned key_len, u32 *flags)
+{
+ int i;
+ u32 key[8];
+ __be32 p_key[8]; /* padded key */
+
+ if (key_len % 4 != 0) {
+ *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+ return -EINVAL;
+ }
+
+ memset(p_key, 0, 32);
+ memcpy(p_key, in_key, key_len);
+
+ key[0] = be32_to_cpu(p_key[0]); /* A */
+ key[1] = be32_to_cpu(p_key[1]); /* B */
+ key[2] = be32_to_cpu(p_key[2]); /* C */
+ key[3] = be32_to_cpu(p_key[3]); /* D */
+ key[4] = be32_to_cpu(p_key[4]); /* E */
+ key[5] = be32_to_cpu(p_key[5]); /* F */
+ key[6] = be32_to_cpu(p_key[6]); /* G */
+ key[7] = be32_to_cpu(p_key[7]); /* H */
+
+ for (i = 0; i < 12; i++) {
+ W(key, 2 * i);
+ W(key, 2 * i + 1);
+
+ c->Kr[i][0] = key[0] & 0x1f;
+ c->Kr[i][1] = key[2] & 0x1f;
+ c->Kr[i][2] = key[4] & 0x1f;
+ c->Kr[i][3] = key[6] & 0x1f;
+
+ c->Km[i][0] = key[7];
+ c->Km[i][1] = key[5];
+ c->Km[i][2] = key[3];
+ c->Km[i][3] = key[1];
+ }
+
+ return 0;
+}
+EXPORT_SYMBOL_GPL(__cast6_setkey);
+
+int cast6_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
+{
+ return __cast6_setkey(crypto_tfm_ctx(tfm), key, keylen,
+ &tfm->crt_flags);
+}
+EXPORT_SYMBOL_GPL(cast6_setkey);
+
+/* forward quad round */
+static inline void Q(u32 *block, u8 *Kr, u32 *Km)
+{
+ u32 I;
+ block[2] ^= F1(block[3], Kr[0], Km[0]);
+ block[1] ^= F2(block[2], Kr[1], Km[1]);
+ block[0] ^= F3(block[1], Kr[2], Km[2]);
+ block[3] ^= F1(block[0], Kr[3], Km[3]);
+}
+
+/* reverse quad round */
+static inline void QBAR(u32 *block, u8 *Kr, u32 *Km)
+{
+ u32 I;
+ block[3] ^= F1(block[0], Kr[3], Km[3]);
+ block[0] ^= F3(block[1], Kr[2], Km[2]);
+ block[1] ^= F2(block[2], Kr[1], Km[1]);
+ block[2] ^= F1(block[3], Kr[0], Km[0]);
+}
+
+void __cast6_encrypt(struct cast6_ctx *c, u8 *outbuf, const u8 *inbuf)
+{
+ const __be32 *src = (const __be32 *)inbuf;
+ __be32 *dst = (__be32 *)outbuf;
+ u32 block[4];
+ u32 *Km;
+ u8 *Kr;
+
+ block[0] = be32_to_cpu(src[0]);
+ block[1] = be32_to_cpu(src[1]);
+ block[2] = be32_to_cpu(src[2]);
+ block[3] = be32_to_cpu(src[3]);
+
+ Km = c->Km[0]; Kr = c->Kr[0]; Q(block, Kr, Km);
+ Km = c->Km[1]; Kr = c->Kr[1]; Q(block, Kr, Km);
+ Km = c->Km[2]; Kr = c->Kr[2]; Q(block, Kr, Km);
+ Km = c->Km[3]; Kr = c->Kr[3]; Q(block, Kr, Km);
+ Km = c->Km[4]; Kr = c->Kr[4]; Q(block, Kr, Km);
+ Km = c->Km[5]; Kr = c->Kr[5]; Q(block, Kr, Km);
+ Km = c->Km[6]; Kr = c->Kr[6]; QBAR(block, Kr, Km);
+ Km = c->Km[7]; Kr = c->Kr[7]; QBAR(block, Kr, Km);
+ Km = c->Km[8]; Kr = c->Kr[8]; QBAR(block, Kr, Km);
+ Km = c->Km[9]; Kr = c->Kr[9]; QBAR(block, Kr, Km);
+ Km = c->Km[10]; Kr = c->Kr[10]; QBAR(block, Kr, Km);
+ Km = c->Km[11]; Kr = c->Kr[11]; QBAR(block, Kr, Km);
+
+ dst[0] = cpu_to_be32(block[0]);
+ dst[1] = cpu_to_be32(block[1]);
+ dst[2] = cpu_to_be32(block[2]);
+ dst[3] = cpu_to_be32(block[3]);
+}
+EXPORT_SYMBOL_GPL(__cast6_encrypt);
+
+static void cast6_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
+{
+ __cast6_encrypt(crypto_tfm_ctx(tfm), outbuf, inbuf);
+}
+
+void __cast6_decrypt(struct cast6_ctx *c, u8 *outbuf, const u8 *inbuf)
+{
+ const __be32 *src = (const __be32 *)inbuf;
+ __be32 *dst = (__be32 *)outbuf;
+ u32 block[4];
+ u32 *Km;
+ u8 *Kr;
+
+ block[0] = be32_to_cpu(src[0]);
+ block[1] = be32_to_cpu(src[1]);
+ block[2] = be32_to_cpu(src[2]);
+ block[3] = be32_to_cpu(src[3]);
+
+ Km = c->Km[11]; Kr = c->Kr[11]; Q(block, Kr, Km);
+ Km = c->Km[10]; Kr = c->Kr[10]; Q(block, Kr, Km);
+ Km = c->Km[9]; Kr = c->Kr[9]; Q(block, Kr, Km);
+ Km = c->Km[8]; Kr = c->Kr[8]; Q(block, Kr, Km);
+ Km = c->Km[7]; Kr = c->Kr[7]; Q(block, Kr, Km);
+ Km = c->Km[6]; Kr = c->Kr[6]; Q(block, Kr, Km);
+ Km = c->Km[5]; Kr = c->Kr[5]; QBAR(block, Kr, Km);
+ Km = c->Km[4]; Kr = c->Kr[4]; QBAR(block, Kr, Km);
+ Km = c->Km[3]; Kr = c->Kr[3]; QBAR(block, Kr, Km);
+ Km = c->Km[2]; Kr = c->Kr[2]; QBAR(block, Kr, Km);
+ Km = c->Km[1]; Kr = c->Kr[1]; QBAR(block, Kr, Km);
+ Km = c->Km[0]; Kr = c->Kr[0]; QBAR(block, Kr, Km);
+
+ dst[0] = cpu_to_be32(block[0]);
+ dst[1] = cpu_to_be32(block[1]);
+ dst[2] = cpu_to_be32(block[2]);
+ dst[3] = cpu_to_be32(block[3]);
+}
+EXPORT_SYMBOL_GPL(__cast6_decrypt);
+
+static void cast6_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
+{
+ __cast6_decrypt(crypto_tfm_ctx(tfm), outbuf, inbuf);
+}
+
+static struct crypto_alg alg = {
+ .cra_name = "cast6",
+ .cra_driver_name = "cast6-generic",
+ .cra_priority = 100,
+ .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
+ .cra_blocksize = CAST6_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct cast6_ctx),
+ .cra_alignmask = 3,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
+ .cipher = {
+ .cia_min_keysize = CAST6_MIN_KEY_SIZE,
+ .cia_max_keysize = CAST6_MAX_KEY_SIZE,
+ .cia_setkey = cast6_setkey,
+ .cia_encrypt = cast6_encrypt,
+ .cia_decrypt = cast6_decrypt}
+ }
+};
+
+static int __init cast6_mod_init(void)
+{
+ return crypto_register_alg(&alg);
+}
+
+static void __exit cast6_mod_fini(void)
+{
+ crypto_unregister_alg(&alg);
+}
+
+module_init(cast6_mod_init);
+module_exit(cast6_mod_fini);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Cast6 Cipher Algorithm");
+MODULE_ALIAS("cast6");
diff --git a/crypto/cast6.c b/crypto/cast_common.c
index 007d02beed6..117dd8250f2 100644
--- a/crypto/cast6.c
+++ b/crypto/cast_common.c
@@ -1,48 +1,21 @@
-/* Kernel cryptographic api.
- * cast6.c - Cast6 cipher algorithm [rfc2612].
+/*
+ * Common lookup tables for CAST-128 (cast5) and CAST-256 (cast6)
*
- * CAST-256 (*cast6*) is a DES like Substitution-Permutation Network (SPN)
- * cryptosystem built upon the CAST-128 (*cast5*) [rfc2144] encryption
- * algorithm.
- *
- * Copyright (C) 2003 Kartikey Mahendra Bhatt <kartik_me@hotmail.com>.
+ * Copyright © 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
+ * Copyright © 2003 Kartikey Mahendra Bhatt <kartik_me@hotmail.com>
+ * Copyright © 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA
+ *
*/
-
-#include <asm/byteorder.h>
-#include <linux/init.h>
-#include <linux/crypto.h>
#include <linux/module.h>
-#include <linux/errno.h>
-#include <linux/string.h>
-#include <linux/types.h>
-
-#define CAST6_BLOCK_SIZE 16
-#define CAST6_MIN_KEY_SIZE 16
-#define CAST6_MAX_KEY_SIZE 32
+#include <crypto/cast_common.h>
-struct cast6_ctx {
- u32 Km[12][4];
- u8 Kr[12][4];
-};
-
-#define F1(D,r,m) ( (I = ((m) + (D))), (I=rol32(I,(r))), \
- (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff]) )
-#define F2(D,r,m) ( (I = ((m) ^ (D))), (I=rol32(I,(r))), \
- (((s1[I >> 24] - s2[(I>>16)&0xff]) + s3[(I>>8)&0xff]) ^ s4[I&0xff]) )
-#define F3(D,r,m) ( (I = ((m) - (D))), (I=rol32(I,(r))), \
- (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff]) )
-
-static const u32 s1[256] = {
+__visible const u32 cast_s1[256] = {
0x30fb40d4, 0x9fa0ff0b, 0x6beccd2f, 0x3f258c7a, 0x1e213f2f,
0x9c004dd3, 0x6003e540, 0xcf9fc949,
0xbfd4af27, 0x88bbbdb5, 0xe2034090, 0x98d09675, 0x6e63a0e0,
@@ -108,8 +81,9 @@ static const u32 s1[256] = {
0x1a69e783, 0x02cc4843, 0xa2f7c579, 0x429ef47d, 0x427b169c,
0x5ac9f049, 0xdd8f0f00, 0x5c8165bf
};
+EXPORT_SYMBOL_GPL(cast_s1);
-static const u32 s2[256] = {
+__visible const u32 cast_s2[256] = {
0x1f201094, 0xef0ba75b, 0x69e3cf7e, 0x393f4380, 0xfe61cf7a,
0xeec5207a, 0x55889c94, 0x72fc0651,
0xada7ef79, 0x4e1d7235, 0xd55a63ce, 0xde0436ba, 0x99c430ef,
@@ -175,8 +149,9 @@ static const u32 s2[256] = {
0x43d79572, 0x7e6dd07c, 0x06dfdf1e, 0x6c6cc4ef, 0x7160a539,
0x73bfbe70, 0x83877605, 0x4523ecf1
};
+EXPORT_SYMBOL_GPL(cast_s2);
-static const u32 s3[256] = {
+__visible const u32 cast_s3[256] = {
0x8defc240, 0x25fa5d9f, 0xeb903dbf, 0xe810c907, 0x47607fff,
0x369fe44b, 0x8c1fc644, 0xaececa90,
0xbeb1f9bf, 0xeefbcaea, 0xe8cf1950, 0x51df07ae, 0x920e8806,
@@ -242,8 +217,9 @@ static const u32 s3[256] = {
0xf7baefd5, 0x4142ed9c, 0xa4315c11, 0x83323ec5, 0xdfef4636,
0xa133c501, 0xe9d3531c, 0xee353783
};
+EXPORT_SYMBOL_GPL(cast_s3);
-static const u32 s4[256] = {
+__visible const u32 cast_s4[256] = {
0x9db30420, 0x1fb6e9de, 0xa7be7bef, 0xd273a298, 0x4a4f7bdb,
0x64ad8c57, 0x85510443, 0xfa020ed1,
0x7e287aff, 0xe60fb663, 0x095f35a1, 0x79ebf120, 0xfd059d43,
@@ -309,237 +285,6 @@ static const u32 s4[256] = {
0x7ae5290c, 0x3cb9536b, 0x851e20fe, 0x9833557e, 0x13ecf0b0,
0xd3ffb372, 0x3f85c5c1, 0x0aef7ed2
};
-
-static const u32 Tm[24][8] = {
- { 0x5a827999, 0xc95c653a, 0x383650db, 0xa7103c7c, 0x15ea281d,
- 0x84c413be, 0xf39dff5f, 0x6277eb00 } ,
- { 0xd151d6a1, 0x402bc242, 0xaf05ade3, 0x1ddf9984, 0x8cb98525,
- 0xfb9370c6, 0x6a6d5c67, 0xd9474808 } ,
- { 0x482133a9, 0xb6fb1f4a, 0x25d50aeb, 0x94aef68c, 0x0388e22d,
- 0x7262cdce, 0xe13cb96f, 0x5016a510 } ,
- { 0xbef090b1, 0x2dca7c52, 0x9ca467f3, 0x0b7e5394, 0x7a583f35,
- 0xe9322ad6, 0x580c1677, 0xc6e60218 } ,
- { 0x35bfedb9, 0xa499d95a, 0x1373c4fb, 0x824db09c, 0xf1279c3d,
- 0x600187de, 0xcedb737f, 0x3db55f20 } ,
- { 0xac8f4ac1, 0x1b693662, 0x8a432203, 0xf91d0da4, 0x67f6f945,
- 0xd6d0e4e6, 0x45aad087, 0xb484bc28 } ,
- { 0x235ea7c9, 0x9238936a, 0x01127f0b, 0x6fec6aac, 0xdec6564d,
- 0x4da041ee, 0xbc7a2d8f, 0x2b541930 } ,
- { 0x9a2e04d1, 0x0907f072, 0x77e1dc13, 0xe6bbc7b4, 0x5595b355,
- 0xc46f9ef6, 0x33498a97, 0xa2237638 } ,
- { 0x10fd61d9, 0x7fd74d7a, 0xeeb1391b, 0x5d8b24bc, 0xcc65105d,
- 0x3b3efbfe, 0xaa18e79f, 0x18f2d340 } ,
- { 0x87ccbee1, 0xf6a6aa82, 0x65809623, 0xd45a81c4, 0x43346d65,
- 0xb20e5906, 0x20e844a7, 0x8fc23048 } ,
- { 0xfe9c1be9, 0x6d76078a, 0xdc4ff32b, 0x4b29decc, 0xba03ca6d,
- 0x28ddb60e, 0x97b7a1af, 0x06918d50 } ,
- { 0x756b78f1, 0xe4456492, 0x531f5033, 0xc1f93bd4, 0x30d32775,
- 0x9fad1316, 0x0e86feb7, 0x7d60ea58 } ,
- { 0xec3ad5f9, 0x5b14c19a, 0xc9eead3b, 0x38c898dc, 0xa7a2847d,
- 0x167c701e, 0x85565bbf, 0xf4304760 } ,
- { 0x630a3301, 0xd1e41ea2, 0x40be0a43, 0xaf97f5e4, 0x1e71e185,
- 0x8d4bcd26, 0xfc25b8c7, 0x6affa468 } ,
- { 0xd9d99009, 0x48b37baa, 0xb78d674b, 0x266752ec, 0x95413e8d,
- 0x041b2a2e, 0x72f515cf, 0xe1cf0170 } ,
- { 0x50a8ed11, 0xbf82d8b2, 0x2e5cc453, 0x9d36aff4, 0x0c109b95,
- 0x7aea8736, 0xe9c472d7, 0x589e5e78 } ,
- { 0xc7784a19, 0x365235ba, 0xa52c215b, 0x14060cfc, 0x82dff89d,
- 0xf1b9e43e, 0x6093cfdf, 0xcf6dbb80 } ,
- { 0x3e47a721, 0xad2192c2, 0x1bfb7e63, 0x8ad56a04, 0xf9af55a5,
- 0x68894146, 0xd7632ce7, 0x463d1888 } ,
- { 0xb5170429, 0x23f0efca, 0x92cadb6b, 0x01a4c70c, 0x707eb2ad,
- 0xdf589e4e, 0x4e3289ef, 0xbd0c7590 } ,
- { 0x2be66131, 0x9ac04cd2, 0x099a3873, 0x78742414, 0xe74e0fb5,
- 0x5627fb56, 0xc501e6f7, 0x33dbd298 } ,
- { 0xa2b5be39, 0x118fa9da, 0x8069957b, 0xef43811c, 0x5e1d6cbd,
- 0xccf7585e, 0x3bd143ff, 0xaaab2fa0 } ,
- { 0x19851b41, 0x885f06e2, 0xf738f283, 0x6612de24, 0xd4ecc9c5,
- 0x43c6b566, 0xb2a0a107, 0x217a8ca8 } ,
- { 0x90547849, 0xff2e63ea, 0x6e084f8b, 0xdce23b2c, 0x4bbc26cd,
- 0xba96126e, 0x296ffe0f, 0x9849e9b0 } ,
- { 0x0723d551, 0x75fdc0f2, 0xe4d7ac93, 0x53b19834, 0xc28b83d5,
- 0x31656f76, 0xa03f5b17, 0x0f1946b8 }
-};
-
-static const u8 Tr[4][8] = {
- { 0x13, 0x04, 0x15, 0x06, 0x17, 0x08, 0x19, 0x0a } ,
- { 0x1b, 0x0c, 0x1d, 0x0e, 0x1f, 0x10, 0x01, 0x12 } ,
- { 0x03, 0x14, 0x05, 0x16, 0x07, 0x18, 0x09, 0x1a } ,
- { 0x0b, 0x1c, 0x0d, 0x1e, 0x0f, 0x00, 0x11, 0x02 }
-};
-
-/* forward octave */
-static void W(u32 *key, unsigned int i) {
- u32 I;
- key[6] ^= F1(key[7], Tr[i % 4][0], Tm[i][0]);
- key[5] ^= F2(key[6], Tr[i % 4][1], Tm[i][1]);
- key[4] ^= F3(key[5], Tr[i % 4][2], Tm[i][2]);
- key[3] ^= F1(key[4], Tr[i % 4][3], Tm[i][3]);
- key[2] ^= F2(key[3], Tr[i % 4][4], Tm[i][4]);
- key[1] ^= F3(key[2], Tr[i % 4][5], Tm[i][5]);
- key[0] ^= F1(key[1], Tr[i % 4][6], Tm[i][6]);
- key[7] ^= F2(key[0], Tr[i % 4][7], Tm[i][7]);
-}
-
-static int cast6_setkey(struct crypto_tfm *tfm, const u8 *in_key,
- unsigned key_len)
-{
- int i;
- u32 key[8];
- __be32 p_key[8]; /* padded key */
- struct cast6_ctx *c = crypto_tfm_ctx(tfm);
- u32 *flags = &tfm->crt_flags;
-
- if (key_len % 4 != 0) {
- *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
- return -EINVAL;
- }
-
- memset (p_key, 0, 32);
- memcpy (p_key, in_key, key_len);
-
- key[0] = be32_to_cpu(p_key[0]); /* A */
- key[1] = be32_to_cpu(p_key[1]); /* B */
- key[2] = be32_to_cpu(p_key[2]); /* C */
- key[3] = be32_to_cpu(p_key[3]); /* D */
- key[4] = be32_to_cpu(p_key[4]); /* E */
- key[5] = be32_to_cpu(p_key[5]); /* F */
- key[6] = be32_to_cpu(p_key[6]); /* G */
- key[7] = be32_to_cpu(p_key[7]); /* H */
-
-
-
- for (i = 0; i < 12; i++) {
- W (key, 2 * i);
- W (key, 2 * i + 1);
-
- c->Kr[i][0] = key[0] & 0x1f;
- c->Kr[i][1] = key[2] & 0x1f;
- c->Kr[i][2] = key[4] & 0x1f;
- c->Kr[i][3] = key[6] & 0x1f;
-
- c->Km[i][0] = key[7];
- c->Km[i][1] = key[5];
- c->Km[i][2] = key[3];
- c->Km[i][3] = key[1];
- }
-
- return 0;
-}
-
-/*forward quad round*/
-static void Q (u32 * block, u8 * Kr, u32 * Km) {
- u32 I;
- block[2] ^= F1(block[3], Kr[0], Km[0]);
- block[1] ^= F2(block[2], Kr[1], Km[1]);
- block[0] ^= F3(block[1], Kr[2], Km[2]);
- block[3] ^= F1(block[0], Kr[3], Km[3]);
-}
-
-/*reverse quad round*/
-static void QBAR (u32 * block, u8 * Kr, u32 * Km) {
- u32 I;
- block[3] ^= F1(block[0], Kr[3], Km[3]);
- block[0] ^= F3(block[1], Kr[2], Km[2]);
- block[1] ^= F2(block[2], Kr[1], Km[1]);
- block[2] ^= F1(block[3], Kr[0], Km[0]);
-}
-
-static void cast6_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
-{
- struct cast6_ctx *c = crypto_tfm_ctx(tfm);
- const __be32 *src = (const __be32 *)inbuf;
- __be32 *dst = (__be32 *)outbuf;
- u32 block[4];
- u32 * Km;
- u8 * Kr;
-
- block[0] = be32_to_cpu(src[0]);
- block[1] = be32_to_cpu(src[1]);
- block[2] = be32_to_cpu(src[2]);
- block[3] = be32_to_cpu(src[3]);
-
- Km = c->Km[0]; Kr = c->Kr[0]; Q (block, Kr, Km);
- Km = c->Km[1]; Kr = c->Kr[1]; Q (block, Kr, Km);
- Km = c->Km[2]; Kr = c->Kr[2]; Q (block, Kr, Km);
- Km = c->Km[3]; Kr = c->Kr[3]; Q (block, Kr, Km);
- Km = c->Km[4]; Kr = c->Kr[4]; Q (block, Kr, Km);
- Km = c->Km[5]; Kr = c->Kr[5]; Q (block, Kr, Km);
- Km = c->Km[6]; Kr = c->Kr[6]; QBAR (block, Kr, Km);
- Km = c->Km[7]; Kr = c->Kr[7]; QBAR (block, Kr, Km);
- Km = c->Km[8]; Kr = c->Kr[8]; QBAR (block, Kr, Km);
- Km = c->Km[9]; Kr = c->Kr[9]; QBAR (block, Kr, Km);
- Km = c->Km[10]; Kr = c->Kr[10]; QBAR (block, Kr, Km);
- Km = c->Km[11]; Kr = c->Kr[11]; QBAR (block, Kr, Km);
-
- dst[0] = cpu_to_be32(block[0]);
- dst[1] = cpu_to_be32(block[1]);
- dst[2] = cpu_to_be32(block[2]);
- dst[3] = cpu_to_be32(block[3]);
-}
-
-static void cast6_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) {
- struct cast6_ctx * c = crypto_tfm_ctx(tfm);
- const __be32 *src = (const __be32 *)inbuf;
- __be32 *dst = (__be32 *)outbuf;
- u32 block[4];
- u32 * Km;
- u8 * Kr;
-
- block[0] = be32_to_cpu(src[0]);
- block[1] = be32_to_cpu(src[1]);
- block[2] = be32_to_cpu(src[2]);
- block[3] = be32_to_cpu(src[3]);
-
- Km = c->Km[11]; Kr = c->Kr[11]; Q (block, Kr, Km);
- Km = c->Km[10]; Kr = c->Kr[10]; Q (block, Kr, Km);
- Km = c->Km[9]; Kr = c->Kr[9]; Q (block, Kr, Km);
- Km = c->Km[8]; Kr = c->Kr[8]; Q (block, Kr, Km);
- Km = c->Km[7]; Kr = c->Kr[7]; Q (block, Kr, Km);
- Km = c->Km[6]; Kr = c->Kr[6]; Q (block, Kr, Km);
- Km = c->Km[5]; Kr = c->Kr[5]; QBAR (block, Kr, Km);
- Km = c->Km[4]; Kr = c->Kr[4]; QBAR (block, Kr, Km);
- Km = c->Km[3]; Kr = c->Kr[3]; QBAR (block, Kr, Km);
- Km = c->Km[2]; Kr = c->Kr[2]; QBAR (block, Kr, Km);
- Km = c->Km[1]; Kr = c->Kr[1]; QBAR (block, Kr, Km);
- Km = c->Km[0]; Kr = c->Kr[0]; QBAR (block, Kr, Km);
-
- dst[0] = cpu_to_be32(block[0]);
- dst[1] = cpu_to_be32(block[1]);
- dst[2] = cpu_to_be32(block[2]);
- dst[3] = cpu_to_be32(block[3]);
-}
-
-static struct crypto_alg alg = {
- .cra_name = "cast6",
- .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
- .cra_blocksize = CAST6_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct cast6_ctx),
- .cra_alignmask = 3,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(alg.cra_list),
- .cra_u = {
- .cipher = {
- .cia_min_keysize = CAST6_MIN_KEY_SIZE,
- .cia_max_keysize = CAST6_MAX_KEY_SIZE,
- .cia_setkey = cast6_setkey,
- .cia_encrypt = cast6_encrypt,
- .cia_decrypt = cast6_decrypt}
- }
-};
-
-static int __init cast6_mod_init(void)
-{
- return crypto_register_alg(&alg);
-}
-
-static void __exit cast6_mod_fini(void)
-{
- crypto_unregister_alg(&alg);
-}
-
-module_init(cast6_mod_init);
-module_exit(cast6_mod_fini);
+EXPORT_SYMBOL_GPL(cast_s4);
MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Cast6 Cipher Algorithm");
diff --git a/crypto/ccm.c b/crypto/ccm.c
index c36d654cf56..1df84217f7c 100644
--- a/crypto/ccm.c
+++ b/crypto/ccm.c
@@ -216,12 +216,12 @@ static void get_data_to_compute(struct crypto_cipher *tfm,
scatterwalk_start(&walk, sg_next(walk.sg));
n = scatterwalk_clamp(&walk, len);
}
- data_src = scatterwalk_map(&walk, 0);
+ data_src = scatterwalk_map(&walk);
compute_mac(tfm, data_src, n, pctx);
len -= n;
- scatterwalk_unmap(data_src, 0);
+ scatterwalk_unmap(data_src);
scatterwalk_advance(&walk, n);
scatterwalk_done(&walk, 0, len);
if (len)
@@ -271,7 +271,8 @@ static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
}
/* compute plaintext into mac */
- get_data_to_compute(cipher, pctx, plain, cryptlen);
+ if (cryptlen)
+ get_data_to_compute(cipher, pctx, plain, cryptlen);
out:
return err;
@@ -363,7 +364,7 @@ static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
if (!err) {
err = crypto_ccm_auth(req, req->dst, cryptlen);
- if (!err && memcmp(pctx->auth_tag, pctx->odata, authsize))
+ if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
err = -EBADMSG;
}
aead_request_complete(req, err);
@@ -422,7 +423,7 @@ static int crypto_ccm_decrypt(struct aead_request *req)
return err;
/* verify */
- if (memcmp(authtag, odata, authsize))
+ if (crypto_memneq(authtag, odata, authsize))
return -EBADMSG;
return err;
@@ -484,18 +485,16 @@ static struct crypto_instance *crypto_ccm_alloc_common(struct rtattr **tb,
int err;
algt = crypto_get_attr_type(tb);
- err = PTR_ERR(algt);
if (IS_ERR(algt))
- return ERR_PTR(err);
+ return ERR_CAST(algt);
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return ERR_PTR(-EINVAL);
cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER,
CRYPTO_ALG_TYPE_MASK);
- err = PTR_ERR(cipher);
if (IS_ERR(cipher))
- return ERR_PTR(err);
+ return ERR_CAST(cipher);
err = -EINVAL;
if (cipher->cra_blocksize != 16)
@@ -573,15 +572,13 @@ out_put_cipher:
static struct crypto_instance *crypto_ccm_alloc(struct rtattr **tb)
{
- int err;
const char *cipher_name;
char ctr_name[CRYPTO_MAX_ALG_NAME];
char full_name[CRYPTO_MAX_ALG_NAME];
cipher_name = crypto_attr_alg_name(tb[1]);
- err = PTR_ERR(cipher_name);
if (IS_ERR(cipher_name))
- return ERR_PTR(err);
+ return ERR_CAST(cipher_name);
if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
cipher_name) >= CRYPTO_MAX_ALG_NAME)
@@ -612,20 +609,17 @@ static struct crypto_template crypto_ccm_tmpl = {
static struct crypto_instance *crypto_ccm_base_alloc(struct rtattr **tb)
{
- int err;
const char *ctr_name;
const char *cipher_name;
char full_name[CRYPTO_MAX_ALG_NAME];
ctr_name = crypto_attr_alg_name(tb[1]);
- err = PTR_ERR(ctr_name);
if (IS_ERR(ctr_name))
- return ERR_PTR(err);
+ return ERR_CAST(ctr_name);
cipher_name = crypto_attr_alg_name(tb[2]);
- err = PTR_ERR(cipher_name);
if (IS_ERR(cipher_name))
- return ERR_PTR(err);
+ return ERR_CAST(cipher_name);
if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
@@ -760,17 +754,15 @@ static struct crypto_instance *crypto_rfc4309_alloc(struct rtattr **tb)
int err;
algt = crypto_get_attr_type(tb);
- err = PTR_ERR(algt);
if (IS_ERR(algt))
- return ERR_PTR(err);
+ return ERR_CAST(algt);
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return ERR_PTR(-EINVAL);
ccm_name = crypto_attr_alg_name(tb[1]);
- err = PTR_ERR(ccm_name);
if (IS_ERR(ccm_name))
- return ERR_PTR(err);
+ return ERR_CAST(ccm_name);
inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
if (!inst)
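The hunks above that replace memcmp() with crypto_memneq() when verifying the CCM authentication tag close a timing side channel: memcmp() may return at the first mismatching byte, so response time can reveal how many leading bytes of a forged tag were correct, while crypto_memneq() accumulates every difference before deciding. A simplified byte-at-a-time sketch of the idea; the in-kernel crypto/memneq.c implementation works word-at-a-time and is more careful about compiler optimizations:

#include <linux/types.h>

static unsigned long tag_neq(const u8 *a, const u8 *b, size_t n)
{
        unsigned long diff = 0;

        while (n--)
                diff |= *a++ ^ *b++;    /* OR the differences, never branch on data */

        return diff;                    /* zero if and only if the tags match */
}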
diff --git a/crypto/chainiv.c b/crypto/chainiv.c
index 7c37a497b86..9c294c8f9a0 100644
--- a/crypto/chainiv.c
+++ b/crypto/chainiv.c
@@ -15,6 +15,7 @@
#include <crypto/internal/skcipher.h>
#include <crypto/rng.h>
+#include <crypto/crypto_wq.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
@@ -125,7 +126,7 @@ static int async_chainiv_schedule_work(struct async_chainiv_ctx *ctx)
int err = ctx->err;
if (!ctx->queue.qlen) {
- smp_mb__before_clear_bit();
+ smp_mb__before_atomic();
clear_bit(CHAINIV_STATE_INUSE, &ctx->state);
if (!ctx->queue.qlen ||
@@ -133,7 +134,7 @@ static int async_chainiv_schedule_work(struct async_chainiv_ctx *ctx)
goto out;
}
- queued = schedule_work(&ctx->postponed);
+ queued = queue_work(kcrypto_wq, &ctx->postponed);
BUG_ON(!queued);
out:
@@ -290,9 +291,8 @@ static struct crypto_instance *chainiv_alloc(struct rtattr **tb)
int err;
algt = crypto_get_attr_type(tb);
- err = PTR_ERR(algt);
if (IS_ERR(algt))
- return ERR_PTR(err);
+ return ERR_CAST(algt);
err = crypto_get_default_rng();
if (err)
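The repeated PTR_ERR()/ERR_PTR() rewrites above (in ccm.c and here) are a pure simplification: ERR_CAST() forwards an error pointer of one type as another without bouncing the errno through an int. A self-contained sketch of the resulting shape, with example_alloc as a hypothetical name:

#include <linux/err.h>
#include <linux/errno.h>
#include <crypto/algapi.h>

/* example_alloc() is hypothetical; it mirrors crypto_ccm_alloc_common() above */
static struct crypto_instance *example_alloc(struct rtattr **tb)
{
        struct crypto_attr_type *algt = crypto_get_attr_type(tb);

        if (IS_ERR(algt))
                return ERR_CAST(algt);  /* was: err = PTR_ERR(algt); ... ERR_PTR(err) */

        /* ... the normal allocation path would continue here ... */
        return ERR_PTR(-ENOSYS);        /* placeholder so the sketch stands alone */
}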
diff --git a/crypto/cipher.c b/crypto/cipher.c
index 9a1a7316eea..39541e0e537 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -8,7 +8,7 @@
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
+ * Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
diff --git a/crypto/cmac.c b/crypto/cmac.c
new file mode 100644
index 00000000000..50880cf17fa
--- /dev/null
+++ b/crypto/cmac.c
@@ -0,0 +1,315 @@
+/*
+ * CMAC: Cipher Block Mode for Authentication
+ *
+ * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
+ *
+ * Based on work by:
+ * Copyright © 2013 Tom St Denis <tstdenis@elliptictech.com>
+ * Based on crypto/xcbc.c:
+ * Copyright © 2006 USAGI/WIDE Project,
+ * Author: Kazunori Miyazawa <miyazawa@linux-ipv6.org>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ */
+
+#include <crypto/internal/hash.h>
+#include <linux/err.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+
+/*
+ * +------------------------
+ * | <parent tfm>
+ * +------------------------
+ * | cmac_tfm_ctx
+ * +------------------------
+ * | consts (block size * 2)
+ * +------------------------
+ */
+struct cmac_tfm_ctx {
+ struct crypto_cipher *child;
+ u8 ctx[];
+};
+
+/*
+ * +------------------------
+ * | <shash desc>
+ * +------------------------
+ * | cmac_desc_ctx
+ * +------------------------
+ * | odds (block size)
+ * +------------------------
+ * | prev (block size)
+ * +------------------------
+ */
+struct cmac_desc_ctx {
+ unsigned int len;
+ u8 ctx[];
+};
+
+static int crypto_cmac_digest_setkey(struct crypto_shash *parent,
+ const u8 *inkey, unsigned int keylen)
+{
+ unsigned long alignmask = crypto_shash_alignmask(parent);
+ struct cmac_tfm_ctx *ctx = crypto_shash_ctx(parent);
+ unsigned int bs = crypto_shash_blocksize(parent);
+ __be64 *consts = PTR_ALIGN((void *)ctx->ctx, alignmask + 1);
+ u64 _const[2];
+ int i, err = 0;
+ u8 msb_mask, gfmask;
+
+ err = crypto_cipher_setkey(ctx->child, inkey, keylen);
+ if (err)
+ return err;
+
+ /* encrypt the zero block */
+ memset(consts, 0, bs);
+ crypto_cipher_encrypt_one(ctx->child, (u8 *)consts, (u8 *)consts);
+
+ switch (bs) {
+ case 16:
+ gfmask = 0x87;
+ _const[0] = be64_to_cpu(consts[1]);
+ _const[1] = be64_to_cpu(consts[0]);
+
+ /* gf(2^128) multiply zero-ciphertext with u and u^2 */
+ for (i = 0; i < 4; i += 2) {
+ msb_mask = ((s64)_const[1] >> 63) & gfmask;
+ _const[1] = (_const[1] << 1) | (_const[0] >> 63);
+ _const[0] = (_const[0] << 1) ^ msb_mask;
+
+ consts[i + 0] = cpu_to_be64(_const[1]);
+ consts[i + 1] = cpu_to_be64(_const[0]);
+ }
+
+ break;
+ case 8:
+ gfmask = 0x1B;
+ _const[0] = be64_to_cpu(consts[0]);
+
+ /* gf(2^64) multiply zero-ciphertext with u and u^2 */
+ for (i = 0; i < 2; i++) {
+ msb_mask = ((s64)_const[0] >> 63) & gfmask;
+ _const[0] = (_const[0] << 1) ^ msb_mask;
+
+ consts[i] = cpu_to_be64(_const[0]);
+ }
+
+ break;
+ }
+
+ return 0;
+}
+
+static int crypto_cmac_digest_init(struct shash_desc *pdesc)
+{
+ unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm);
+ struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
+ int bs = crypto_shash_blocksize(pdesc->tfm);
+ u8 *prev = PTR_ALIGN((void *)ctx->ctx, alignmask + 1) + bs;
+
+ ctx->len = 0;
+ memset(prev, 0, bs);
+
+ return 0;
+}
+
+static int crypto_cmac_digest_update(struct shash_desc *pdesc, const u8 *p,
+ unsigned int len)
+{
+ struct crypto_shash *parent = pdesc->tfm;
+ unsigned long alignmask = crypto_shash_alignmask(parent);
+ struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
+ struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
+ struct crypto_cipher *tfm = tctx->child;
+ int bs = crypto_shash_blocksize(parent);
+ u8 *odds = PTR_ALIGN((void *)ctx->ctx, alignmask + 1);
+ u8 *prev = odds + bs;
+
+ /* if the data still fits in one block, just buffer it */
+ if ((ctx->len + len) <= bs) {
+ memcpy(odds + ctx->len, p, len);
+ ctx->len += len;
+ return 0;
+ }
+
+ /* filling odds with new data and encrypting it */
+ memcpy(odds + ctx->len, p, bs - ctx->len);
+ len -= bs - ctx->len;
+ p += bs - ctx->len;
+
+ crypto_xor(prev, odds, bs);
+ crypto_cipher_encrypt_one(tfm, prev, prev);
+
+ /* clearing the length */
+ ctx->len = 0;
+
+ /* encrypt the rest of the data, one full block at a time */
+ while (len > bs) {
+ crypto_xor(prev, p, bs);
+ crypto_cipher_encrypt_one(tfm, prev, prev);
+ p += bs;
+ len -= bs;
+ }
+
+ /* keep the leftover bytes (at most one block) for later */
+ if (len) {
+ memcpy(odds, p, len);
+ ctx->len = len;
+ }
+
+ return 0;
+}
+
+static int crypto_cmac_digest_final(struct shash_desc *pdesc, u8 *out)
+{
+ struct crypto_shash *parent = pdesc->tfm;
+ unsigned long alignmask = crypto_shash_alignmask(parent);
+ struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
+ struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
+ struct crypto_cipher *tfm = tctx->child;
+ int bs = crypto_shash_blocksize(parent);
+ u8 *consts = PTR_ALIGN((void *)tctx->ctx, alignmask + 1);
+ u8 *odds = PTR_ALIGN((void *)ctx->ctx, alignmask + 1);
+ u8 *prev = odds + bs;
+ unsigned int offset = 0;
+
+ if (ctx->len != bs) {
+ unsigned int rlen;
+ u8 *p = odds + ctx->len;
+
+ *p = 0x80;
+ p++;
+
+ rlen = bs - ctx->len - 1;
+ if (rlen)
+ memset(p, 0, rlen);
+
+ offset += bs;
+ }
+
+ crypto_xor(prev, odds, bs);
+ crypto_xor(prev, consts + offset, bs);
+
+ crypto_cipher_encrypt_one(tfm, out, prev);
+
+ return 0;
+}
+
+static int cmac_init_tfm(struct crypto_tfm *tfm)
+{
+ struct crypto_cipher *cipher;
+ struct crypto_instance *inst = (void *)tfm->__crt_alg;
+ struct crypto_spawn *spawn = crypto_instance_ctx(inst);
+ struct cmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ cipher = crypto_spawn_cipher(spawn);
+ if (IS_ERR(cipher))
+ return PTR_ERR(cipher);
+
+ ctx->child = cipher;
+
+ return 0;
+};
+
+static void cmac_exit_tfm(struct crypto_tfm *tfm)
+{
+ struct cmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
+ crypto_free_cipher(ctx->child);
+}
+
+static int cmac_create(struct crypto_template *tmpl, struct rtattr **tb)
+{
+ struct shash_instance *inst;
+ struct crypto_alg *alg;
+ unsigned long alignmask;
+ int err;
+
+ err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
+ if (err)
+ return err;
+
+ alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
+ CRYPTO_ALG_TYPE_MASK);
+ if (IS_ERR(alg))
+ return PTR_ERR(alg);
+
+ switch (alg->cra_blocksize) {
+ case 16:
+ case 8:
+ break;
+ default:
+ goto out_put_alg;
+ }
+
+ inst = shash_alloc_instance("cmac", alg);
+ err = PTR_ERR(inst);
+ if (IS_ERR(inst))
+ goto out_put_alg;
+
+ err = crypto_init_spawn(shash_instance_ctx(inst), alg,
+ shash_crypto_instance(inst),
+ CRYPTO_ALG_TYPE_MASK);
+ if (err)
+ goto out_free_inst;
+
+ alignmask = alg->cra_alignmask | (sizeof(long) - 1);
+ inst->alg.base.cra_alignmask = alignmask;
+ inst->alg.base.cra_priority = alg->cra_priority;
+ inst->alg.base.cra_blocksize = alg->cra_blocksize;
+
+ inst->alg.digestsize = alg->cra_blocksize;
+ inst->alg.descsize =
+ ALIGN(sizeof(struct cmac_desc_ctx), crypto_tfm_ctx_alignment())
+ + (alignmask & ~(crypto_tfm_ctx_alignment() - 1))
+ + alg->cra_blocksize * 2;
+
+ inst->alg.base.cra_ctxsize =
+ ALIGN(sizeof(struct cmac_tfm_ctx), alignmask + 1)
+ + alg->cra_blocksize * 2;
+
+ inst->alg.base.cra_init = cmac_init_tfm;
+ inst->alg.base.cra_exit = cmac_exit_tfm;
+
+ inst->alg.init = crypto_cmac_digest_init;
+ inst->alg.update = crypto_cmac_digest_update;
+ inst->alg.final = crypto_cmac_digest_final;
+ inst->alg.setkey = crypto_cmac_digest_setkey;
+
+ err = shash_register_instance(tmpl, inst);
+ if (err) {
+out_free_inst:
+ shash_free_instance(shash_crypto_instance(inst));
+ }
+
+out_put_alg:
+ crypto_mod_put(alg);
+ return err;
+}
+
+static struct crypto_template crypto_cmac_tmpl = {
+ .name = "cmac",
+ .create = cmac_create,
+ .free = shash_free_instance,
+ .module = THIS_MODULE,
+};
+
+static int __init crypto_cmac_module_init(void)
+{
+ return crypto_register_template(&crypto_cmac_tmpl);
+}
+
+static void __exit crypto_cmac_module_exit(void)
+{
+ crypto_unregister_template(&crypto_cmac_tmpl);
+}
+
+module_init(crypto_cmac_module_init);
+module_exit(crypto_cmac_module_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("CMAC keyed hash algorithm");
diff --git a/crypto/compress.c b/crypto/compress.c
index 1ee357085d3..c33f0763a95 100644
--- a/crypto/compress.c
+++ b/crypto/compress.c
@@ -7,7 +7,7 @@
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
+ * Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
@@ -39,7 +39,7 @@ int crypto_init_compress_ops(struct crypto_tfm *tfm)
ops->cot_compress = crypto_compress;
ops->cot_decompress = crypto_decompress;
-
+
return 0;
}
diff --git a/crypto/crc32.c b/crypto/crc32.c
new file mode 100644
index 00000000000..9d1c4156989
--- /dev/null
+++ b/crypto/crc32.c
@@ -0,0 +1,158 @@
+/* GPL HEADER START
+ *
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 only,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License version 2 for more details (a copy is included
+ * in the LICENSE file that accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License
+ * version 2 along with this program; If not, see http://www.gnu.org/licenses
+ *
+ * Please visit http://www.xyratex.com/contact if you need additional
+ * information or have any questions.
+ *
+ * GPL HEADER END
+ */
+
+/*
+ * Copyright 2012 Xyratex Technology Limited
+ */
+
+/*
+ * This is a crypto API shash wrapper around crc32_le.
+ */
+
+#include <linux/crc32.h>
+#include <crypto/internal/hash.h>
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/string.h>
+#include <linux/kernel.h>
+
+#define CHKSUM_BLOCK_SIZE 1
+#define CHKSUM_DIGEST_SIZE 4
+
+static u32 __crc32_le(u32 crc, unsigned char const *p, size_t len)
+{
+ return crc32_le(crc, p, len);
+}
+
+/** No default init with ~0 */
+static int crc32_cra_init(struct crypto_tfm *tfm)
+{
+ u32 *key = crypto_tfm_ctx(tfm);
+
+ *key = 0;
+
+ return 0;
+}
+
+
+/*
+ * Setting the seed allows arbitrary accumulators and flexible XOR policy
+ * If your algorithm starts with ~0, then XOR with ~0 before you set
+ * the seed.
+ */
+static int crc32_setkey(struct crypto_shash *hash, const u8 *key,
+ unsigned int keylen)
+{
+ u32 *mctx = crypto_shash_ctx(hash);
+
+ if (keylen != sizeof(u32)) {
+ crypto_shash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ return -EINVAL;
+ }
+ *mctx = le32_to_cpup((__le32 *)key);
+ return 0;
+}
+
+static int crc32_init(struct shash_desc *desc)
+{
+ u32 *mctx = crypto_shash_ctx(desc->tfm);
+ u32 *crcp = shash_desc_ctx(desc);
+
+ *crcp = *mctx;
+
+ return 0;
+}
+
+static int crc32_update(struct shash_desc *desc, const u8 *data,
+ unsigned int len)
+{
+ u32 *crcp = shash_desc_ctx(desc);
+
+ *crcp = __crc32_le(*crcp, data, len);
+ return 0;
+}
+
+/* No final XOR 0xFFFFFFFF, like crc32_le */
+static int __crc32_finup(u32 *crcp, const u8 *data, unsigned int len,
+ u8 *out)
+{
+ *(__le32 *)out = cpu_to_le32(__crc32_le(*crcp, data, len));
+ return 0;
+}
+
+static int crc32_finup(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ return __crc32_finup(shash_desc_ctx(desc), data, len, out);
+}
+
+static int crc32_final(struct shash_desc *desc, u8 *out)
+{
+ u32 *crcp = shash_desc_ctx(desc);
+
+ *(__le32 *)out = cpu_to_le32p(crcp);
+ return 0;
+}
+
+static int crc32_digest(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ return __crc32_finup(crypto_shash_ctx(desc->tfm), data, len,
+ out);
+}
+static struct shash_alg alg = {
+ .setkey = crc32_setkey,
+ .init = crc32_init,
+ .update = crc32_update,
+ .final = crc32_final,
+ .finup = crc32_finup,
+ .digest = crc32_digest,
+ .descsize = sizeof(u32),
+ .digestsize = CHKSUM_DIGEST_SIZE,
+ .base = {
+ .cra_name = "crc32",
+ .cra_driver_name = "crc32-table",
+ .cra_priority = 100,
+ .cra_blocksize = CHKSUM_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(u32),
+ .cra_module = THIS_MODULE,
+ .cra_init = crc32_cra_init,
+ }
+};
+
+static int __init crc32_mod_init(void)
+{
+ return crypto_register_shash(&alg);
+}
+
+static void __exit crc32_mod_fini(void)
+{
+ crypto_unregister_shash(&alg);
+}
+
+module_init(crc32_mod_init);
+module_exit(crc32_mod_fini);
+
+MODULE_AUTHOR("Alexander Boyko <alexander_boyko@xyratex.com>");
+MODULE_DESCRIPTION("CRC32 calculations wrapper for lib/crc32");
+MODULE_LICENSE("GPL");
diff --git a/crypto/crc32c.c b/crypto/crc32c.c
deleted file mode 100644
index 973bc2cfab2..00000000000
--- a/crypto/crc32c.c
+++ /dev/null
@@ -1,260 +0,0 @@
-/*
- * Cryptographic API.
- *
- * CRC32C chksum
- *
- *@Article{castagnoli-crc,
- * author = { Guy Castagnoli and Stefan Braeuer and Martin Herrman},
- * title = {{Optimization of Cyclic Redundancy-Check Codes with 24
- * and 32 Parity Bits}},
- * journal = IEEE Transactions on Communication,
- * year = {1993},
- * volume = {41},
- * number = {6},
- * pages = {},
- * month = {June},
- *}
- * Used by the iSCSI driver, possibly others, and derived from the
- * the iscsi-crc.c module of the linux-iscsi driver at
- * http://linux-iscsi.sourceforge.net.
- *
- * Following the example of lib/crc32, this function is intended to be
- * flexible and useful for all users. Modules that currently have their
- * own crc32c, but hopefully may be able to use this one are:
- * net/sctp (please add all your doco to here if you change to
- * use this one!)
- * <endoflist>
- *
- * Copyright (c) 2004 Cisco Systems, Inc.
- * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
- */
-
-#include <crypto/internal/hash.h>
-#include <linux/init.h>
-#include <linux/module.h>
-#include <linux/string.h>
-#include <linux/kernel.h>
-
-#define CHKSUM_BLOCK_SIZE 1
-#define CHKSUM_DIGEST_SIZE 4
-
-struct chksum_ctx {
- u32 key;
-};
-
-struct chksum_desc_ctx {
- u32 crc;
-};
-
-/*
- * This is the CRC-32C table
- * Generated with:
- * width = 32 bits
- * poly = 0x1EDC6F41
- * reflect input bytes = true
- * reflect output bytes = true
- */
-
-static const u32 crc32c_table[256] = {
- 0x00000000L, 0xF26B8303L, 0xE13B70F7L, 0x1350F3F4L,
- 0xC79A971FL, 0x35F1141CL, 0x26A1E7E8L, 0xD4CA64EBL,
- 0x8AD958CFL, 0x78B2DBCCL, 0x6BE22838L, 0x9989AB3BL,
- 0x4D43CFD0L, 0xBF284CD3L, 0xAC78BF27L, 0x5E133C24L,
- 0x105EC76FL, 0xE235446CL, 0xF165B798L, 0x030E349BL,
- 0xD7C45070L, 0x25AFD373L, 0x36FF2087L, 0xC494A384L,
- 0x9A879FA0L, 0x68EC1CA3L, 0x7BBCEF57L, 0x89D76C54L,
- 0x5D1D08BFL, 0xAF768BBCL, 0xBC267848L, 0x4E4DFB4BL,
- 0x20BD8EDEL, 0xD2D60DDDL, 0xC186FE29L, 0x33ED7D2AL,
- 0xE72719C1L, 0x154C9AC2L, 0x061C6936L, 0xF477EA35L,
- 0xAA64D611L, 0x580F5512L, 0x4B5FA6E6L, 0xB93425E5L,
- 0x6DFE410EL, 0x9F95C20DL, 0x8CC531F9L, 0x7EAEB2FAL,
- 0x30E349B1L, 0xC288CAB2L, 0xD1D83946L, 0x23B3BA45L,
- 0xF779DEAEL, 0x05125DADL, 0x1642AE59L, 0xE4292D5AL,
- 0xBA3A117EL, 0x4851927DL, 0x5B016189L, 0xA96AE28AL,
- 0x7DA08661L, 0x8FCB0562L, 0x9C9BF696L, 0x6EF07595L,
- 0x417B1DBCL, 0xB3109EBFL, 0xA0406D4BL, 0x522BEE48L,
- 0x86E18AA3L, 0x748A09A0L, 0x67DAFA54L, 0x95B17957L,
- 0xCBA24573L, 0x39C9C670L, 0x2A993584L, 0xD8F2B687L,
- 0x0C38D26CL, 0xFE53516FL, 0xED03A29BL, 0x1F682198L,
- 0x5125DAD3L, 0xA34E59D0L, 0xB01EAA24L, 0x42752927L,
- 0x96BF4DCCL, 0x64D4CECFL, 0x77843D3BL, 0x85EFBE38L,
- 0xDBFC821CL, 0x2997011FL, 0x3AC7F2EBL, 0xC8AC71E8L,
- 0x1C661503L, 0xEE0D9600L, 0xFD5D65F4L, 0x0F36E6F7L,
- 0x61C69362L, 0x93AD1061L, 0x80FDE395L, 0x72966096L,
- 0xA65C047DL, 0x5437877EL, 0x4767748AL, 0xB50CF789L,
- 0xEB1FCBADL, 0x197448AEL, 0x0A24BB5AL, 0xF84F3859L,
- 0x2C855CB2L, 0xDEEEDFB1L, 0xCDBE2C45L, 0x3FD5AF46L,
- 0x7198540DL, 0x83F3D70EL, 0x90A324FAL, 0x62C8A7F9L,
- 0xB602C312L, 0x44694011L, 0x5739B3E5L, 0xA55230E6L,
- 0xFB410CC2L, 0x092A8FC1L, 0x1A7A7C35L, 0xE811FF36L,
- 0x3CDB9BDDL, 0xCEB018DEL, 0xDDE0EB2AL, 0x2F8B6829L,
- 0x82F63B78L, 0x709DB87BL, 0x63CD4B8FL, 0x91A6C88CL,
- 0x456CAC67L, 0xB7072F64L, 0xA457DC90L, 0x563C5F93L,
- 0x082F63B7L, 0xFA44E0B4L, 0xE9141340L, 0x1B7F9043L,
- 0xCFB5F4A8L, 0x3DDE77ABL, 0x2E8E845FL, 0xDCE5075CL,
- 0x92A8FC17L, 0x60C37F14L, 0x73938CE0L, 0x81F80FE3L,
- 0x55326B08L, 0xA759E80BL, 0xB4091BFFL, 0x466298FCL,
- 0x1871A4D8L, 0xEA1A27DBL, 0xF94AD42FL, 0x0B21572CL,
- 0xDFEB33C7L, 0x2D80B0C4L, 0x3ED04330L, 0xCCBBC033L,
- 0xA24BB5A6L, 0x502036A5L, 0x4370C551L, 0xB11B4652L,
- 0x65D122B9L, 0x97BAA1BAL, 0x84EA524EL, 0x7681D14DL,
- 0x2892ED69L, 0xDAF96E6AL, 0xC9A99D9EL, 0x3BC21E9DL,
- 0xEF087A76L, 0x1D63F975L, 0x0E330A81L, 0xFC588982L,
- 0xB21572C9L, 0x407EF1CAL, 0x532E023EL, 0xA145813DL,
- 0x758FE5D6L, 0x87E466D5L, 0x94B49521L, 0x66DF1622L,
- 0x38CC2A06L, 0xCAA7A905L, 0xD9F75AF1L, 0x2B9CD9F2L,
- 0xFF56BD19L, 0x0D3D3E1AL, 0x1E6DCDEEL, 0xEC064EEDL,
- 0xC38D26C4L, 0x31E6A5C7L, 0x22B65633L, 0xD0DDD530L,
- 0x0417B1DBL, 0xF67C32D8L, 0xE52CC12CL, 0x1747422FL,
- 0x49547E0BL, 0xBB3FFD08L, 0xA86F0EFCL, 0x5A048DFFL,
- 0x8ECEE914L, 0x7CA56A17L, 0x6FF599E3L, 0x9D9E1AE0L,
- 0xD3D3E1ABL, 0x21B862A8L, 0x32E8915CL, 0xC083125FL,
- 0x144976B4L, 0xE622F5B7L, 0xF5720643L, 0x07198540L,
- 0x590AB964L, 0xAB613A67L, 0xB831C993L, 0x4A5A4A90L,
- 0x9E902E7BL, 0x6CFBAD78L, 0x7FAB5E8CL, 0x8DC0DD8FL,
- 0xE330A81AL, 0x115B2B19L, 0x020BD8EDL, 0xF0605BEEL,
- 0x24AA3F05L, 0xD6C1BC06L, 0xC5914FF2L, 0x37FACCF1L,
- 0x69E9F0D5L, 0x9B8273D6L, 0x88D28022L, 0x7AB90321L,
- 0xAE7367CAL, 0x5C18E4C9L, 0x4F48173DL, 0xBD23943EL,
- 0xF36E6F75L, 0x0105EC76L, 0x12551F82L, 0xE03E9C81L,
- 0x34F4F86AL, 0xC69F7B69L, 0xD5CF889DL, 0x27A40B9EL,
- 0x79B737BAL, 0x8BDCB4B9L, 0x988C474DL, 0x6AE7C44EL,
- 0xBE2DA0A5L, 0x4C4623A6L, 0x5F16D052L, 0xAD7D5351L
-};
-
-/*
- * Steps through buffer one byte at at time, calculates reflected
- * crc using table.
- */
-
-static u32 crc32c(u32 crc, const u8 *data, unsigned int length)
-{
- while (length--)
- crc = crc32c_table[(crc ^ *data++) & 0xFFL] ^ (crc >> 8);
-
- return crc;
-}
-
-/*
- * Steps through buffer one byte at at time, calculates reflected
- * crc using table.
- */
-
-static int chksum_init(struct shash_desc *desc)
-{
- struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm);
- struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
-
- ctx->crc = mctx->key;
-
- return 0;
-}
-
-/*
- * Setting the seed allows arbitrary accumulators and flexible XOR policy
- * If your algorithm starts with ~0, then XOR with ~0 before you set
- * the seed.
- */
-static int chksum_setkey(struct crypto_shash *tfm, const u8 *key,
- unsigned int keylen)
-{
- struct chksum_ctx *mctx = crypto_shash_ctx(tfm);
-
- if (keylen != sizeof(mctx->key)) {
- crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
- return -EINVAL;
- }
- mctx->key = le32_to_cpu(*(__le32 *)key);
- return 0;
-}
-
-static int chksum_update(struct shash_desc *desc, const u8 *data,
- unsigned int length)
-{
- struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
-
- ctx->crc = crc32c(ctx->crc, data, length);
- return 0;
-}
-
-static int chksum_final(struct shash_desc *desc, u8 *out)
-{
- struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
-
- *(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
- return 0;
-}
-
-static int __chksum_finup(u32 *crcp, const u8 *data, unsigned int len, u8 *out)
-{
- *(__le32 *)out = ~cpu_to_le32(crc32c(*crcp, data, len));
- return 0;
-}
-
-static int chksum_finup(struct shash_desc *desc, const u8 *data,
- unsigned int len, u8 *out)
-{
- struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
-
- return __chksum_finup(&ctx->crc, data, len, out);
-}
-
-static int chksum_digest(struct shash_desc *desc, const u8 *data,
- unsigned int length, u8 *out)
-{
- struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm);
-
- return __chksum_finup(&mctx->key, data, length, out);
-}
-
-static int crc32c_cra_init(struct crypto_tfm *tfm)
-{
- struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
-
- mctx->key = ~0;
- return 0;
-}
-
-static struct shash_alg alg = {
- .digestsize = CHKSUM_DIGEST_SIZE,
- .setkey = chksum_setkey,
- .init = chksum_init,
- .update = chksum_update,
- .final = chksum_final,
- .finup = chksum_finup,
- .digest = chksum_digest,
- .descsize = sizeof(struct chksum_desc_ctx),
- .base = {
- .cra_name = "crc32c",
- .cra_driver_name = "crc32c-generic",
- .cra_priority = 100,
- .cra_blocksize = CHKSUM_BLOCK_SIZE,
- .cra_alignmask = 3,
- .cra_ctxsize = sizeof(struct chksum_ctx),
- .cra_module = THIS_MODULE,
- .cra_init = crc32c_cra_init,
- }
-};
-
-static int __init crc32c_mod_init(void)
-{
- return crypto_register_shash(&alg);
-}
-
-static void __exit crc32c_mod_fini(void)
-{
- crypto_unregister_shash(&alg);
-}
-
-module_init(crc32c_mod_init);
-module_exit(crc32c_mod_fini);
-
-MODULE_AUTHOR("Clay Haapala <chaapala@cisco.com>");
-MODULE_DESCRIPTION("CRC32c (Castagnoli) calculations wrapper for lib/crc32c");
-MODULE_LICENSE("GPL");
diff --git a/crypto/crc32c_generic.c b/crypto/crc32c_generic.c
new file mode 100644
index 00000000000..d9c7beba8e5
--- /dev/null
+++ b/crypto/crc32c_generic.c
@@ -0,0 +1,174 @@
+/*
+ * Cryptographic API.
+ *
+ * CRC32C chksum
+ *
+ *@Article{castagnoli-crc,
+ * author = { Guy Castagnoli and Stefan Braeuer and Martin Herrman},
+ * title = {{Optimization of Cyclic Redundancy-Check Codes with 24
+ * and 32 Parity Bits}},
+ * journal = IEEE Transactions on Communication,
+ * year = {1993},
+ * volume = {41},
+ * number = {6},
+ * pages = {},
+ * month = {June},
+ *}
+ * Used by the iSCSI driver, possibly others, and derived from
+ * the iscsi-crc.c module of the linux-iscsi driver at
+ * http://linux-iscsi.sourceforge.net.
+ *
+ * Following the example of lib/crc32, this function is intended to be
+ * flexible and useful for all users. Modules that currently have their
+ * own crc32c, but hopefully may be able to use this one are:
+ * net/sctp (please add all your doco to here if you change to
+ * use this one!)
+ * <endoflist>
+ *
+ * Copyright (c) 2004 Cisco Systems, Inc.
+ * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/internal/hash.h>
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/string.h>
+#include <linux/kernel.h>
+#include <linux/crc32.h>
+
+#define CHKSUM_BLOCK_SIZE 1
+#define CHKSUM_DIGEST_SIZE 4
+
+struct chksum_ctx {
+ u32 key;
+};
+
+struct chksum_desc_ctx {
+ u32 crc;
+};
+
+/*
+ * Steps through the buffer one byte at a time, calculating the
+ * reflected CRC using a lookup table.
+ */
+
+static int chksum_init(struct shash_desc *desc)
+{
+ struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm);
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ ctx->crc = mctx->key;
+
+ return 0;
+}
+
+/*
+ * Setting the seed allows arbitrary accumulators and flexible XOR policy
+ * If your algorithm starts with ~0, then XOR with ~0 before you set
+ * the seed.
+ */
+static int chksum_setkey(struct crypto_shash *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ struct chksum_ctx *mctx = crypto_shash_ctx(tfm);
+
+ if (keylen != sizeof(mctx->key)) {
+ crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ return -EINVAL;
+ }
+ mctx->key = le32_to_cpu(*(__le32 *)key);
+ return 0;
+}
+
+static int chksum_update(struct shash_desc *desc, const u8 *data,
+ unsigned int length)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ ctx->crc = __crc32c_le(ctx->crc, data, length);
+ return 0;
+}
+
+static int chksum_final(struct shash_desc *desc, u8 *out)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ *(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
+ return 0;
+}
+
+static int __chksum_finup(u32 *crcp, const u8 *data, unsigned int len, u8 *out)
+{
+ *(__le32 *)out = ~cpu_to_le32(__crc32c_le(*crcp, data, len));
+ return 0;
+}
+
+static int chksum_finup(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ return __chksum_finup(&ctx->crc, data, len, out);
+}
+
+static int chksum_digest(struct shash_desc *desc, const u8 *data,
+ unsigned int length, u8 *out)
+{
+ struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm);
+
+ return __chksum_finup(&mctx->key, data, length, out);
+}
+
+static int crc32c_cra_init(struct crypto_tfm *tfm)
+{
+ struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
+
+ mctx->key = ~0;
+ return 0;
+}
+
+static struct shash_alg alg = {
+ .digestsize = CHKSUM_DIGEST_SIZE,
+ .setkey = chksum_setkey,
+ .init = chksum_init,
+ .update = chksum_update,
+ .final = chksum_final,
+ .finup = chksum_finup,
+ .digest = chksum_digest,
+ .descsize = sizeof(struct chksum_desc_ctx),
+ .base = {
+ .cra_name = "crc32c",
+ .cra_driver_name = "crc32c-generic",
+ .cra_priority = 100,
+ .cra_blocksize = CHKSUM_BLOCK_SIZE,
+ .cra_alignmask = 3,
+ .cra_ctxsize = sizeof(struct chksum_ctx),
+ .cra_module = THIS_MODULE,
+ .cra_init = crc32c_cra_init,
+ }
+};
+
+static int __init crc32c_mod_init(void)
+{
+ return crypto_register_shash(&alg);
+}
+
+static void __exit crc32c_mod_fini(void)
+{
+ crypto_unregister_shash(&alg);
+}
+
+module_init(crc32c_mod_init);
+module_exit(crc32c_mod_fini);
+
+MODULE_AUTHOR("Clay Haapala <chaapala@cisco.com>");
+MODULE_DESCRIPTION("CRC32c (Castagnoli) calculations wrapper for lib/crc32c");
+MODULE_LICENSE("GPL");
+MODULE_ALIAS("crc32c");
+MODULE_SOFTDEP("pre: crc32c");
diff --git a/crypto/crct10dif_common.c b/crypto/crct10dif_common.c
new file mode 100644
index 00000000000..b2fab366f51
--- /dev/null
+++ b/crypto/crct10dif_common.c
@@ -0,0 +1,82 @@
+/*
+ * Cryptographic API.
+ *
+ * T10 Data Integrity Field CRC16 Crypto Transform
+ *
+ * Copyright (c) 2007 Oracle Corporation. All rights reserved.
+ * Written by Martin K. Petersen <martin.petersen@oracle.com>
+ * Copyright (C) 2013 Intel Corporation
+ * Author: Tim Chen <tim.c.chen@linux.intel.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+ * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ *
+ */
+
+#include <linux/crc-t10dif.h>
+#include <linux/module.h>
+#include <linux/kernel.h>
+
+/* Table generated using the following polynomial:
+ * x^16 + x^15 + x^11 + x^9 + x^8 + x^7 + x^5 + x^4 + x^2 + x + 1
+ * gt: 0x8bb7
+ */
+static const __u16 t10_dif_crc_table[256] = {
+ 0x0000, 0x8BB7, 0x9CD9, 0x176E, 0xB205, 0x39B2, 0x2EDC, 0xA56B,
+ 0xEFBD, 0x640A, 0x7364, 0xF8D3, 0x5DB8, 0xD60F, 0xC161, 0x4AD6,
+ 0x54CD, 0xDF7A, 0xC814, 0x43A3, 0xE6C8, 0x6D7F, 0x7A11, 0xF1A6,
+ 0xBB70, 0x30C7, 0x27A9, 0xAC1E, 0x0975, 0x82C2, 0x95AC, 0x1E1B,
+ 0xA99A, 0x222D, 0x3543, 0xBEF4, 0x1B9F, 0x9028, 0x8746, 0x0CF1,
+ 0x4627, 0xCD90, 0xDAFE, 0x5149, 0xF422, 0x7F95, 0x68FB, 0xE34C,
+ 0xFD57, 0x76E0, 0x618E, 0xEA39, 0x4F52, 0xC4E5, 0xD38B, 0x583C,
+ 0x12EA, 0x995D, 0x8E33, 0x0584, 0xA0EF, 0x2B58, 0x3C36, 0xB781,
+ 0xD883, 0x5334, 0x445A, 0xCFED, 0x6A86, 0xE131, 0xF65F, 0x7DE8,
+ 0x373E, 0xBC89, 0xABE7, 0x2050, 0x853B, 0x0E8C, 0x19E2, 0x9255,
+ 0x8C4E, 0x07F9, 0x1097, 0x9B20, 0x3E4B, 0xB5FC, 0xA292, 0x2925,
+ 0x63F3, 0xE844, 0xFF2A, 0x749D, 0xD1F6, 0x5A41, 0x4D2F, 0xC698,
+ 0x7119, 0xFAAE, 0xEDC0, 0x6677, 0xC31C, 0x48AB, 0x5FC5, 0xD472,
+ 0x9EA4, 0x1513, 0x027D, 0x89CA, 0x2CA1, 0xA716, 0xB078, 0x3BCF,
+ 0x25D4, 0xAE63, 0xB90D, 0x32BA, 0x97D1, 0x1C66, 0x0B08, 0x80BF,
+ 0xCA69, 0x41DE, 0x56B0, 0xDD07, 0x786C, 0xF3DB, 0xE4B5, 0x6F02,
+ 0x3AB1, 0xB106, 0xA668, 0x2DDF, 0x88B4, 0x0303, 0x146D, 0x9FDA,
+ 0xD50C, 0x5EBB, 0x49D5, 0xC262, 0x6709, 0xECBE, 0xFBD0, 0x7067,
+ 0x6E7C, 0xE5CB, 0xF2A5, 0x7912, 0xDC79, 0x57CE, 0x40A0, 0xCB17,
+ 0x81C1, 0x0A76, 0x1D18, 0x96AF, 0x33C4, 0xB873, 0xAF1D, 0x24AA,
+ 0x932B, 0x189C, 0x0FF2, 0x8445, 0x212E, 0xAA99, 0xBDF7, 0x3640,
+ 0x7C96, 0xF721, 0xE04F, 0x6BF8, 0xCE93, 0x4524, 0x524A, 0xD9FD,
+ 0xC7E6, 0x4C51, 0x5B3F, 0xD088, 0x75E3, 0xFE54, 0xE93A, 0x628D,
+ 0x285B, 0xA3EC, 0xB482, 0x3F35, 0x9A5E, 0x11E9, 0x0687, 0x8D30,
+ 0xE232, 0x6985, 0x7EEB, 0xF55C, 0x5037, 0xDB80, 0xCCEE, 0x4759,
+ 0x0D8F, 0x8638, 0x9156, 0x1AE1, 0xBF8A, 0x343D, 0x2353, 0xA8E4,
+ 0xB6FF, 0x3D48, 0x2A26, 0xA191, 0x04FA, 0x8F4D, 0x9823, 0x1394,
+ 0x5942, 0xD2F5, 0xC59B, 0x4E2C, 0xEB47, 0x60F0, 0x779E, 0xFC29,
+ 0x4BA8, 0xC01F, 0xD771, 0x5CC6, 0xF9AD, 0x721A, 0x6574, 0xEEC3,
+ 0xA415, 0x2FA2, 0x38CC, 0xB37B, 0x1610, 0x9DA7, 0x8AC9, 0x017E,
+ 0x1F65, 0x94D2, 0x83BC, 0x080B, 0xAD60, 0x26D7, 0x31B9, 0xBA0E,
+ 0xF0D8, 0x7B6F, 0x6C01, 0xE7B6, 0x42DD, 0xC96A, 0xDE04, 0x55B3
+};
+
+__u16 crc_t10dif_generic(__u16 crc, const unsigned char *buffer, size_t len)
+{
+ unsigned int i;
+
+ for (i = 0 ; i < len ; i++)
+ crc = (crc << 8) ^ t10_dif_crc_table[((crc >> 8) ^ buffer[i]) & 0xff];
+
+ return crc;
+}
+EXPORT_SYMBOL(crc_t10dif_generic);
+
+MODULE_DESCRIPTION("T10 DIF CRC calculation common code");
+MODULE_LICENSE("GPL");
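crc_t10dif_generic() exported above is the plain table-driven CRC16 that the crct10dif shash in the next file wraps; the T10 DIF guard tag of a protection tuple is this CRC over the data block, seeded with zero. A tiny illustrative caller (sector_guard_tag and the 512-byte block size are assumptions for the example):

#include <linux/types.h>
#include <linux/crc-t10dif.h>

static __u16 sector_guard_tag(const u8 *sector)
{
        return crc_t10dif_generic(0, sector, 512);      /* seed 0, one 512-byte block */
}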
diff --git a/crypto/crct10dif_generic.c b/crypto/crct10dif_generic.c
new file mode 100644
index 00000000000..877e7114ec5
--- /dev/null
+++ b/crypto/crct10dif_generic.c
@@ -0,0 +1,127 @@
+/*
+ * Cryptographic API.
+ *
+ * T10 Data Integrity Field CRC16 Crypto Transform
+ *
+ * Copyright (c) 2007 Oracle Corporation. All rights reserved.
+ * Written by Martin K. Petersen <martin.petersen@oracle.com>
+ * Copyright (C) 2013 Intel Corporation
+ * Author: Tim Chen <tim.c.chen@linux.intel.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+ * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ *
+ */
+
+#include <linux/module.h>
+#include <linux/crc-t10dif.h>
+#include <crypto/internal/hash.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+
+struct chksum_desc_ctx {
+ __u16 crc;
+};
+
+/*
+ * Steps through the buffer one byte at a time, calculating the
+ * reflected CRC using a lookup table.
+ */
+
+static int chksum_init(struct shash_desc *desc)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ ctx->crc = 0;
+
+ return 0;
+}
+
+static int chksum_update(struct shash_desc *desc, const u8 *data,
+ unsigned int length)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ ctx->crc = crc_t10dif_generic(ctx->crc, data, length);
+ return 0;
+}
+
+static int chksum_final(struct shash_desc *desc, u8 *out)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ *(__u16 *)out = ctx->crc;
+ return 0;
+}
+
+static int __chksum_finup(__u16 *crcp, const u8 *data, unsigned int len,
+ u8 *out)
+{
+ *(__u16 *)out = crc_t10dif_generic(*crcp, data, len);
+ return 0;
+}
+
+static int chksum_finup(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ return __chksum_finup(&ctx->crc, data, len, out);
+}
+
+static int chksum_digest(struct shash_desc *desc, const u8 *data,
+ unsigned int length, u8 *out)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ return __chksum_finup(&ctx->crc, data, length, out);
+}
+
+static struct shash_alg alg = {
+ .digestsize = CRC_T10DIF_DIGEST_SIZE,
+ .init = chksum_init,
+ .update = chksum_update,
+ .final = chksum_final,
+ .finup = chksum_finup,
+ .digest = chksum_digest,
+ .descsize = sizeof(struct chksum_desc_ctx),
+ .base = {
+ .cra_name = "crct10dif",
+ .cra_driver_name = "crct10dif-generic",
+ .cra_priority = 100,
+ .cra_blocksize = CRC_T10DIF_BLOCK_SIZE,
+ .cra_module = THIS_MODULE,
+ }
+};
+
+static int __init crct10dif_mod_init(void)
+{
+ int ret;
+
+ ret = crypto_register_shash(&alg);
+ return ret;
+}
+
+static void __exit crct10dif_mod_fini(void)
+{
+ crypto_unregister_shash(&alg);
+}
+
+module_init(crct10dif_mod_init);
+module_exit(crct10dif_mod_fini);
+
+MODULE_AUTHOR("Tim Chen <tim.c.chen@linux.intel.com>");
+MODULE_DESCRIPTION("T10 DIF CRC calculation.");
+MODULE_LICENSE("GPL");
+MODULE_ALIAS("crct10dif");
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index d29e06b350f..7bdd61b867c 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -3,6 +3,13 @@
*
* Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
*
+ * Added AEAD support to cryptd.
+ * Authors: Tadeusz Struk (tadeusz.struk@intel.com)
+ * Adrian Hoban <adrian.hoban@intel.com>
+ * Gabriele Paoloni <gabriele.paoloni@intel.com>
+ * Aidan O'Mahony (aidan.o.mahony@intel.com)
+ * Copyright (c) 2010, Intel Corporation.
+ *
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
@@ -12,30 +19,42 @@
#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
+#include <crypto/internal/aead.h>
+#include <crypto/cryptd.h>
+#include <crypto/crypto_wq.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
-#include <linux/kthread.h>
#include <linux/list.h>
#include <linux/module.h>
-#include <linux/mutex.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
-#include <linux/spinlock.h>
-#define CRYPTD_MAX_QLEN 100
+#define CRYPTD_MAX_CPU_QLEN 100
-struct cryptd_state {
- spinlock_t lock;
- struct mutex mutex;
+struct cryptd_cpu_queue {
struct crypto_queue queue;
- struct task_struct *task;
+ struct work_struct work;
+};
+
+struct cryptd_queue {
+ struct cryptd_cpu_queue __percpu *cpu_queue;
};
struct cryptd_instance_ctx {
struct crypto_spawn spawn;
- struct cryptd_state *state;
+ struct cryptd_queue *queue;
+};
+
+struct hashd_instance_ctx {
+ struct crypto_shash_spawn spawn;
+ struct cryptd_queue *queue;
+};
+
+struct aead_instance_ctx {
+ struct crypto_aead_spawn aead_spawn;
+ struct cryptd_queue *queue;
};
struct cryptd_blkcipher_ctx {
@@ -47,18 +66,106 @@ struct cryptd_blkcipher_request_ctx {
};
struct cryptd_hash_ctx {
- struct crypto_hash *child;
+ struct crypto_shash *child;
};
struct cryptd_hash_request_ctx {
crypto_completion_t complete;
+ struct shash_desc desc;
};
-static inline struct cryptd_state *cryptd_get_state(struct crypto_tfm *tfm)
+struct cryptd_aead_ctx {
+ struct crypto_aead *child;
+};
+
+struct cryptd_aead_request_ctx {
+ crypto_completion_t complete;
+};
+
+static void cryptd_queue_worker(struct work_struct *work);
+
+static int cryptd_init_queue(struct cryptd_queue *queue,
+ unsigned int max_cpu_qlen)
+{
+ int cpu;
+ struct cryptd_cpu_queue *cpu_queue;
+
+ queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
+ if (!queue->cpu_queue)
+ return -ENOMEM;
+ for_each_possible_cpu(cpu) {
+ cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
+ crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
+ INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
+ }
+ return 0;
+}
+
+static void cryptd_fini_queue(struct cryptd_queue *queue)
+{
+ int cpu;
+ struct cryptd_cpu_queue *cpu_queue;
+
+ for_each_possible_cpu(cpu) {
+ cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
+ BUG_ON(cpu_queue->queue.qlen);
+ }
+ free_percpu(queue->cpu_queue);
+}
+
+static int cryptd_enqueue_request(struct cryptd_queue *queue,
+ struct crypto_async_request *request)
+{
+ int cpu, err;
+ struct cryptd_cpu_queue *cpu_queue;
+
+ cpu = get_cpu();
+ cpu_queue = this_cpu_ptr(queue->cpu_queue);
+ err = crypto_enqueue_request(&cpu_queue->queue, request);
+ queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
+ put_cpu();
+
+ return err;
+}
+
+/* Called in workqueue context: do one unit of real crypto work (via
+ * req->complete) and reschedule itself if there is more work to
+ * do. */
+static void cryptd_queue_worker(struct work_struct *work)
+{
+ struct cryptd_cpu_queue *cpu_queue;
+ struct crypto_async_request *req, *backlog;
+
+ cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
+ /*
+ * Only handle one request at a time to avoid hogging the crypto
+ * workqueue. preempt_disable/enable is used to prevent being preempted
+ * by cryptd_enqueue_request(); local_bh_disable/enable is used to
+ * prevent cryptd_enqueue_request() from being entered from softirq
+ * context.
+ */
+ local_bh_disable();
+ preempt_disable();
+ backlog = crypto_get_backlog(&cpu_queue->queue);
+ req = crypto_dequeue_request(&cpu_queue->queue);
+ preempt_enable();
+ local_bh_enable();
+
+ if (!req)
+ return;
+
+ if (backlog)
+ backlog->complete(backlog, -EINPROGRESS);
+ req->complete(req, 0);
+
+ if (cpu_queue->queue.qlen)
+ queue_work(kcrypto_wq, &cpu_queue->work);
+}
+
+static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
- return ictx->state;
+ return ictx->queue;
}
static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
@@ -130,19 +237,13 @@ static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
{
struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
- struct cryptd_state *state =
- cryptd_get_state(crypto_ablkcipher_tfm(tfm));
- int err;
+ struct cryptd_queue *queue;
+ queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
rctx->complete = req->base.complete;
req->base.complete = complete;
- spin_lock_bh(&state->lock);
- err = ablkcipher_enqueue_request(&state->queue, req);
- spin_unlock_bh(&state->lock);
-
- wake_up_process(state->task);
- return err;
+ return cryptd_enqueue_request(queue, &req->base);
}
static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
@@ -176,45 +277,28 @@ static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
- struct cryptd_state *state = cryptd_get_state(tfm);
- int active;
-
- mutex_lock(&state->mutex);
- active = ablkcipher_tfm_in_queue(&state->queue,
- __crypto_ablkcipher_cast(tfm));
- mutex_unlock(&state->mutex);
-
- BUG_ON(active);
crypto_free_blkcipher(ctx->child);
}
-static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg,
- struct cryptd_state *state)
+static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
+ unsigned int tail)
{
+ char *p;
struct crypto_instance *inst;
- struct cryptd_instance_ctx *ctx;
int err;
- inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
- if (!inst) {
- inst = ERR_PTR(-ENOMEM);
- goto out;
- }
+ p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
+ if (!p)
+ return ERR_PTR(-ENOMEM);
+
+ inst = (void *)(p + head);
err = -ENAMETOOLONG;
if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
"cryptd(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
goto out_free_inst;
- ctx = crypto_instance_ctx(inst);
- err = crypto_init_spawn(&ctx->spawn, alg, inst,
- CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
- if (err)
- goto out_free_inst;
-
- ctx->state = state;
-
memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
inst->alg.cra_priority = alg->cra_priority + 50;
@@ -222,29 +306,41 @@ static struct crypto_instance *cryptd_alloc_instance(struct crypto_alg *alg,
inst->alg.cra_alignmask = alg->cra_alignmask;
out:
- return inst;
+ return p;
out_free_inst:
- kfree(inst);
- inst = ERR_PTR(err);
+ kfree(p);
+ p = ERR_PTR(err);
goto out;
}
-static struct crypto_instance *cryptd_alloc_blkcipher(
- struct rtattr **tb, struct cryptd_state *state)
+static int cryptd_create_blkcipher(struct crypto_template *tmpl,
+ struct rtattr **tb,
+ struct cryptd_queue *queue)
{
+ struct cryptd_instance_ctx *ctx;
struct crypto_instance *inst;
struct crypto_alg *alg;
+ int err;
alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
CRYPTO_ALG_TYPE_MASK);
if (IS_ERR(alg))
- return ERR_CAST(alg);
+ return PTR_ERR(alg);
- inst = cryptd_alloc_instance(alg, state);
+ inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
+ err = PTR_ERR(inst);
if (IS_ERR(inst))
goto out_put_alg;
+ ctx = crypto_instance_ctx(inst);
+ ctx->queue = queue;
+
+ err = crypto_init_spawn(&ctx->spawn, alg, inst,
+ CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
+ if (err)
+ goto out_free_inst;
+
inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
inst->alg.cra_type = &crypto_ablkcipher_type;
@@ -263,58 +359,57 @@ static struct crypto_instance *cryptd_alloc_blkcipher(
inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;
+ err = crypto_register_instance(tmpl, inst);
+ if (err) {
+ crypto_drop_spawn(&ctx->spawn);
+out_free_inst:
+ kfree(inst);
+ }
+
out_put_alg:
crypto_mod_put(alg);
- return inst;
+ return err;
}
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
- struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
- struct crypto_spawn *spawn = &ictx->spawn;
+ struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
+ struct crypto_shash_spawn *spawn = &ictx->spawn;
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
- struct crypto_hash *cipher;
+ struct crypto_shash *hash;
- cipher = crypto_spawn_hash(spawn);
- if (IS_ERR(cipher))
- return PTR_ERR(cipher);
+ hash = crypto_spawn_shash(spawn);
+ if (IS_ERR(hash))
+ return PTR_ERR(hash);
- ctx->child = cipher;
- tfm->crt_ahash.reqsize =
- sizeof(struct cryptd_hash_request_ctx);
+ ctx->child = hash;
+ crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
+ sizeof(struct cryptd_hash_request_ctx) +
+ crypto_shash_descsize(hash));
return 0;
}
static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
- struct cryptd_state *state = cryptd_get_state(tfm);
- int active;
- mutex_lock(&state->mutex);
- active = ahash_tfm_in_queue(&state->queue,
- __crypto_ahash_cast(tfm));
- mutex_unlock(&state->mutex);
-
- BUG_ON(active);
-
- crypto_free_hash(ctx->child);
+ crypto_free_shash(ctx->child);
}
static int cryptd_hash_setkey(struct crypto_ahash *parent,
const u8 *key, unsigned int keylen)
{
struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
- struct crypto_hash *child = ctx->child;
+ struct crypto_shash *child = ctx->child;
int err;
- crypto_hash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
- crypto_hash_set_flags(child, crypto_ahash_get_flags(parent) &
- CRYPTO_TFM_REQ_MASK);
- err = crypto_hash_setkey(child, key, keylen);
- crypto_ahash_set_flags(parent, crypto_hash_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
+ crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+ crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
+ CRYPTO_TFM_REQ_MASK);
+ err = crypto_shash_setkey(child, key, keylen);
+ crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
+ CRYPTO_TFM_RES_MASK);
return err;
}
@@ -323,38 +418,30 @@ static int cryptd_hash_enqueue(struct ahash_request *req,
{
struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
- struct cryptd_state *state =
- cryptd_get_state(crypto_ahash_tfm(tfm));
- int err;
+ struct cryptd_queue *queue =
+ cryptd_get_queue(crypto_ahash_tfm(tfm));
rctx->complete = req->base.complete;
req->base.complete = complete;
- spin_lock_bh(&state->lock);
- err = ahash_enqueue_request(&state->queue, req);
- spin_unlock_bh(&state->lock);
-
- wake_up_process(state->task);
- return err;
+ return cryptd_enqueue_request(queue, &req->base);
}
static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
- struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
- struct crypto_hash *child = ctx->child;
- struct ahash_request *req = ahash_request_cast(req_async);
- struct cryptd_hash_request_ctx *rctx;
- struct hash_desc desc;
-
- rctx = ahash_request_ctx(req);
+ struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
+ struct crypto_shash *child = ctx->child;
+ struct ahash_request *req = ahash_request_cast(req_async);
+ struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+ struct shash_desc *desc = &rctx->desc;
if (unlikely(err == -EINPROGRESS))
goto out;
- desc.tfm = child;
- desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+ desc->tfm = child;
+ desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
- err = crypto_hash_crt(child)->init(&desc);
+ err = crypto_shash_init(desc);
req->base.complete = rctx->complete;
@@ -371,23 +458,15 @@ static int cryptd_hash_init_enqueue(struct ahash_request *req)
static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
- struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
- struct crypto_hash *child = ctx->child;
- struct ahash_request *req = ahash_request_cast(req_async);
+ struct ahash_request *req = ahash_request_cast(req_async);
struct cryptd_hash_request_ctx *rctx;
- struct hash_desc desc;
rctx = ahash_request_ctx(req);
if (unlikely(err == -EINPROGRESS))
goto out;
- desc.tfm = child;
- desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
-
- err = crypto_hash_crt(child)->update(&desc,
- req->src,
- req->nbytes);
+ err = shash_ahash_update(req, &rctx->desc);
req->base.complete = rctx->complete;
@@ -404,21 +483,13 @@ static int cryptd_hash_update_enqueue(struct ahash_request *req)
static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
- struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
- struct crypto_hash *child = ctx->child;
- struct ahash_request *req = ahash_request_cast(req_async);
- struct cryptd_hash_request_ctx *rctx;
- struct hash_desc desc;
-
- rctx = ahash_request_ctx(req);
+ struct ahash_request *req = ahash_request_cast(req_async);
+ struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
if (unlikely(err == -EINPROGRESS))
goto out;
- desc.tfm = child;
- desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
-
- err = crypto_hash_crt(child)->final(&desc, req->result);
+ err = crypto_shash_final(&rctx->desc, req->result);
req->base.complete = rctx->complete;
@@ -433,26 +504,44 @@ static int cryptd_hash_final_enqueue(struct ahash_request *req)
return cryptd_hash_enqueue(req, cryptd_hash_final);
}
-static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
+static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
- struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
- struct crypto_hash *child = ctx->child;
- struct ahash_request *req = ahash_request_cast(req_async);
- struct cryptd_hash_request_ctx *rctx;
- struct hash_desc desc;
+ struct ahash_request *req = ahash_request_cast(req_async);
+ struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
- rctx = ahash_request_ctx(req);
+ if (unlikely(err == -EINPROGRESS))
+ goto out;
+
+ err = shash_ahash_finup(req, &rctx->desc);
+
+ req->base.complete = rctx->complete;
+
+out:
+ local_bh_disable();
+ rctx->complete(&req->base, err);
+ local_bh_enable();
+}
+
+static int cryptd_hash_finup_enqueue(struct ahash_request *req)
+{
+ return cryptd_hash_enqueue(req, cryptd_hash_finup);
+}
+
+static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
+{
+ struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
+ struct crypto_shash *child = ctx->child;
+ struct ahash_request *req = ahash_request_cast(req_async);
+ struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+ struct shash_desc *desc = &rctx->desc;
if (unlikely(err == -EINPROGRESS))
goto out;
- desc.tfm = child;
- desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+ desc->tfm = child;
+ desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
- err = crypto_hash_crt(child)->digest(&desc,
- req->src,
- req->nbytes,
- req->result);
+ err = shash_ahash_digest(req, desc);
req->base.complete = rctx->complete;
@@ -467,156 +556,401 @@ static int cryptd_hash_digest_enqueue(struct ahash_request *req)
return cryptd_hash_enqueue(req, cryptd_hash_digest);
}
-static struct crypto_instance *cryptd_alloc_hash(
- struct rtattr **tb, struct cryptd_state *state)
+static int cryptd_hash_export(struct ahash_request *req, void *out)
{
- struct crypto_instance *inst;
+ struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+
+ return crypto_shash_export(&rctx->desc, out);
+}
+
+static int cryptd_hash_import(struct ahash_request *req, const void *in)
+{
+ struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+
+ return crypto_shash_import(&rctx->desc, in);
+}
+
+static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
+ struct cryptd_queue *queue)
+{
+ struct hashd_instance_ctx *ctx;
+ struct ahash_instance *inst;
+ struct shash_alg *salg;
struct crypto_alg *alg;
+ int err;
- alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_HASH,
- CRYPTO_ALG_TYPE_HASH_MASK);
- if (IS_ERR(alg))
- return ERR_PTR(PTR_ERR(alg));
+ salg = shash_attr_alg(tb[1], 0, 0);
+ if (IS_ERR(salg))
+ return PTR_ERR(salg);
- inst = cryptd_alloc_instance(alg, state);
+ alg = &salg->base;
+ inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
+ sizeof(*ctx));
+ err = PTR_ERR(inst);
if (IS_ERR(inst))
goto out_put_alg;
- inst->alg.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC;
- inst->alg.cra_type = &crypto_ahash_type;
+ ctx = ahash_instance_ctx(inst);
+ ctx->queue = queue;
+
+ err = crypto_init_shash_spawn(&ctx->spawn, salg,
+ ahash_crypto_instance(inst));
+ if (err)
+ goto out_free_inst;
+
+ inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC;
+
+ inst->alg.halg.digestsize = salg->digestsize;
+ inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);
- inst->alg.cra_ahash.digestsize = alg->cra_hash.digestsize;
- inst->alg.cra_ctxsize = sizeof(struct cryptd_hash_ctx);
+ inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
+ inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;
- inst->alg.cra_init = cryptd_hash_init_tfm;
- inst->alg.cra_exit = cryptd_hash_exit_tfm;
+ inst->alg.init = cryptd_hash_init_enqueue;
+ inst->alg.update = cryptd_hash_update_enqueue;
+ inst->alg.final = cryptd_hash_final_enqueue;
+ inst->alg.finup = cryptd_hash_finup_enqueue;
+ inst->alg.export = cryptd_hash_export;
+ inst->alg.import = cryptd_hash_import;
+ inst->alg.setkey = cryptd_hash_setkey;
+ inst->alg.digest = cryptd_hash_digest_enqueue;
- inst->alg.cra_ahash.init = cryptd_hash_init_enqueue;
- inst->alg.cra_ahash.update = cryptd_hash_update_enqueue;
- inst->alg.cra_ahash.final = cryptd_hash_final_enqueue;
- inst->alg.cra_ahash.setkey = cryptd_hash_setkey;
- inst->alg.cra_ahash.digest = cryptd_hash_digest_enqueue;
+ err = ahash_register_instance(tmpl, inst);
+ if (err) {
+ crypto_drop_shash(&ctx->spawn);
+out_free_inst:
+ kfree(inst);
+ }
out_put_alg:
crypto_mod_put(alg);
- return inst;
+ return err;
+}
+
+static void cryptd_aead_crypt(struct aead_request *req,
+ struct crypto_aead *child,
+ int err,
+ int (*crypt)(struct aead_request *req))
+{
+ struct cryptd_aead_request_ctx *rctx;
+ rctx = aead_request_ctx(req);
+
+ if (unlikely(err == -EINPROGRESS))
+ goto out;
+ aead_request_set_tfm(req, child);
+ err = crypt(req);
+ req->base.complete = rctx->complete;
+out:
+ local_bh_disable();
+ rctx->complete(&req->base, err);
+ local_bh_enable();
}
-static struct cryptd_state state;
+static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
+{
+ struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
+ struct crypto_aead *child = ctx->child;
+ struct aead_request *req;
-static struct crypto_instance *cryptd_alloc(struct rtattr **tb)
+ req = container_of(areq, struct aead_request, base);
+ cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->encrypt);
+}
+
+static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
+{
+ struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
+ struct crypto_aead *child = ctx->child;
+ struct aead_request *req;
+
+ req = container_of(areq, struct aead_request, base);
+ cryptd_aead_crypt(req, child, err, crypto_aead_crt(child)->decrypt);
+}
+
+static int cryptd_aead_enqueue(struct aead_request *req,
+ crypto_completion_t complete)
+{
+ struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
+ struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+ struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));
+
+ rctx->complete = req->base.complete;
+ req->base.complete = complete;
+ return cryptd_enqueue_request(queue, &req->base);
+}
+
+static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
+{
+ return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
+}
+
+static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
+{
+ return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
+}
+
+static int cryptd_aead_init_tfm(struct crypto_tfm *tfm)
+{
+ struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
+ struct aead_instance_ctx *ictx = crypto_instance_ctx(inst);
+ struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
+ struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct crypto_aead *cipher;
+
+ cipher = crypto_spawn_aead(spawn);
+ if (IS_ERR(cipher))
+ return PTR_ERR(cipher);
+
+ crypto_aead_set_flags(cipher, CRYPTO_TFM_REQ_MAY_SLEEP);
+ ctx->child = cipher;
+ tfm->crt_aead.reqsize = sizeof(struct cryptd_aead_request_ctx);
+ return 0;
+}
+
+static void cryptd_aead_exit_tfm(struct crypto_tfm *tfm)
+{
+ struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(tfm);
+ crypto_free_aead(ctx->child);
+}
+
+static int cryptd_create_aead(struct crypto_template *tmpl,
+ struct rtattr **tb,
+ struct cryptd_queue *queue)
+{
+ struct aead_instance_ctx *ctx;
+ struct crypto_instance *inst;
+ struct crypto_alg *alg;
+ int err;
+
+ alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_AEAD,
+ CRYPTO_ALG_TYPE_MASK);
+ if (IS_ERR(alg))
+ return PTR_ERR(alg);
+
+ inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
+ err = PTR_ERR(inst);
+ if (IS_ERR(inst))
+ goto out_put_alg;
+
+ ctx = crypto_instance_ctx(inst);
+ ctx->queue = queue;
+
+ err = crypto_init_spawn(&ctx->aead_spawn.base, alg, inst,
+ CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
+ if (err)
+ goto out_free_inst;
+
+ inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
+ inst->alg.cra_type = alg->cra_type;
+ inst->alg.cra_ctxsize = sizeof(struct cryptd_aead_ctx);
+ inst->alg.cra_init = cryptd_aead_init_tfm;
+ inst->alg.cra_exit = cryptd_aead_exit_tfm;
+ inst->alg.cra_aead.setkey = alg->cra_aead.setkey;
+ inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
+ inst->alg.cra_aead.geniv = alg->cra_aead.geniv;
+ inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
+ inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
+ inst->alg.cra_aead.encrypt = cryptd_aead_encrypt_enqueue;
+ inst->alg.cra_aead.decrypt = cryptd_aead_decrypt_enqueue;
+ inst->alg.cra_aead.givencrypt = alg->cra_aead.givencrypt;
+ inst->alg.cra_aead.givdecrypt = alg->cra_aead.givdecrypt;
+
+ err = crypto_register_instance(tmpl, inst);
+ if (err) {
+ crypto_drop_spawn(&ctx->aead_spawn.base);
+out_free_inst:
+ kfree(inst);
+ }
+out_put_alg:
+ crypto_mod_put(alg);
+ return err;
+}
+
+static struct cryptd_queue queue;
+
+static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
struct crypto_attr_type *algt;
algt = crypto_get_attr_type(tb);
if (IS_ERR(algt))
- return ERR_CAST(algt);
+ return PTR_ERR(algt);
switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
case CRYPTO_ALG_TYPE_BLKCIPHER:
- return cryptd_alloc_blkcipher(tb, &state);
+ return cryptd_create_blkcipher(tmpl, tb, &queue);
case CRYPTO_ALG_TYPE_DIGEST:
- return cryptd_alloc_hash(tb, &state);
+ return cryptd_create_hash(tmpl, tb, &queue);
+ case CRYPTO_ALG_TYPE_AEAD:
+ return cryptd_create_aead(tmpl, tb, &queue);
}
- return ERR_PTR(-EINVAL);
+ return -EINVAL;
}
static void cryptd_free(struct crypto_instance *inst)
{
struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
-
- crypto_drop_spawn(&ctx->spawn);
- kfree(inst);
+ struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
+ struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);
+
+ switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
+ case CRYPTO_ALG_TYPE_AHASH:
+ crypto_drop_shash(&hctx->spawn);
+ kfree(ahash_instance(inst));
+ return;
+ case CRYPTO_ALG_TYPE_AEAD:
+ crypto_drop_spawn(&aead_ctx->aead_spawn.base);
+ kfree(inst);
+ return;
+ default:
+ crypto_drop_spawn(&ctx->spawn);
+ kfree(inst);
+ }
}
static struct crypto_template cryptd_tmpl = {
.name = "cryptd",
- .alloc = cryptd_alloc,
+ .create = cryptd_create,
.free = cryptd_free,
.module = THIS_MODULE,
};
-static inline int cryptd_create_thread(struct cryptd_state *state,
- int (*fn)(void *data), const char *name)
-{
- spin_lock_init(&state->lock);
- mutex_init(&state->mutex);
- crypto_init_queue(&state->queue, CRYPTD_MAX_QLEN);
+struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
+ u32 type, u32 mask)
+{
+ char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
+ struct crypto_tfm *tfm;
+
+ if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
+ "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
+ return ERR_PTR(-EINVAL);
+ type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
+ type |= CRYPTO_ALG_TYPE_BLKCIPHER;
+ mask &= ~CRYPTO_ALG_TYPE_MASK;
+ mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
+ tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
+ if (IS_ERR(tfm))
+ return ERR_CAST(tfm);
+ if (tfm->__crt_alg->cra_module != THIS_MODULE) {
+ crypto_free_tfm(tfm);
+ return ERR_PTR(-EINVAL);
+ }
- state->task = kthread_run(fn, state, name);
- if (IS_ERR(state->task))
- return PTR_ERR(state->task);
+ return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
+}
+EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
- return 0;
+struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
+{
+ struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
+ return ctx->child;
}
+EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);
-static inline void cryptd_stop_thread(struct cryptd_state *state)
+void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
- BUG_ON(state->queue.qlen);
- kthread_stop(state->task);
+ crypto_free_ablkcipher(&tfm->base);
}
+EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
-static int cryptd_thread(void *data)
+struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
+ u32 type, u32 mask)
{
- struct cryptd_state *state = data;
- int stop;
+ char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
+ struct crypto_ahash *tfm;
- current->flags |= PF_NOFREEZE;
+ if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
+ "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
+ return ERR_PTR(-EINVAL);
+ tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
+ if (IS_ERR(tfm))
+ return ERR_CAST(tfm);
+ if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
+ crypto_free_ahash(tfm);
+ return ERR_PTR(-EINVAL);
+ }
+
+ return __cryptd_ahash_cast(tfm);
+}
+EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
- do {
- struct crypto_async_request *req, *backlog;
+struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
+{
+ struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
- mutex_lock(&state->mutex);
- __set_current_state(TASK_INTERRUPTIBLE);
+ return ctx->child;
+}
+EXPORT_SYMBOL_GPL(cryptd_ahash_child);
- spin_lock_bh(&state->lock);
- backlog = crypto_get_backlog(&state->queue);
- req = crypto_dequeue_request(&state->queue);
- spin_unlock_bh(&state->lock);
+struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
+{
+ struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+ return &rctx->desc;
+}
+EXPORT_SYMBOL_GPL(cryptd_shash_desc);
- stop = kthread_should_stop();
+void cryptd_free_ahash(struct cryptd_ahash *tfm)
+{
+ crypto_free_ahash(&tfm->base);
+}
+EXPORT_SYMBOL_GPL(cryptd_free_ahash);
- if (stop || req) {
- __set_current_state(TASK_RUNNING);
- if (req) {
- if (backlog)
- backlog->complete(backlog,
- -EINPROGRESS);
- req->complete(req, 0);
- }
- }
+struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
+ u32 type, u32 mask)
+{
+ char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
+ struct crypto_aead *tfm;
- mutex_unlock(&state->mutex);
+ if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
+ "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
+ return ERR_PTR(-EINVAL);
+ tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
+ if (IS_ERR(tfm))
+ return ERR_CAST(tfm);
+ if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
+ crypto_free_aead(tfm);
+ return ERR_PTR(-EINVAL);
+ }
+ return __cryptd_aead_cast(tfm);
+}
+EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
- schedule();
- } while (!stop);
+struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
+{
+ struct cryptd_aead_ctx *ctx;
+ ctx = crypto_aead_ctx(&tfm->base);
+ return ctx->child;
+}
+EXPORT_SYMBOL_GPL(cryptd_aead_child);
- return 0;
+void cryptd_free_aead(struct cryptd_aead *tfm)
+{
+ crypto_free_aead(&tfm->base);
}
+EXPORT_SYMBOL_GPL(cryptd_free_aead);
static int __init cryptd_init(void)
{
int err;
- err = cryptd_create_thread(&state, cryptd_thread, "cryptd");
+ err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
if (err)
return err;
err = crypto_register_template(&cryptd_tmpl);
if (err)
- kthread_stop(state.task);
+ cryptd_fini_queue(&queue);
return err;
}
static void __exit cryptd_exit(void)
{
- cryptd_stop_thread(&state);
+ cryptd_fini_queue(&queue);
crypto_unregister_template(&cryptd_tmpl);
}
-module_init(cryptd_init);
+subsys_initcall(cryptd_init);
module_exit(cryptd_exit);
MODULE_LICENSE("GPL");
diff --git a/crypto/crypto_null.c b/crypto/crypto_null.c
index cb71c9122bc..1dc54bb95a8 100644
--- a/crypto/crypto_null.c
+++ b/crypto/crypto_null.c
@@ -1,11 +1,11 @@
-/*
+/*
* Cryptographic API.
*
* Null algorithms, aka Much Ado About Nothing.
*
* These are needed for IPsec, and may be useful in general for
* testing & debugging.
- *
+ *
* The null cipher is compliant with RFC2410.
*
* Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
@@ -17,6 +17,7 @@
*
*/
+#include <crypto/null.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <linux/init.h>
@@ -24,11 +25,6 @@
#include <linux/mm.h>
#include <linux/string.h>
-#define NULL_KEY_SIZE 0
-#define NULL_BLOCK_SIZE 1
-#define NULL_DIGEST_SIZE 0
-#define NULL_IV_SIZE 0
-
static int null_compress(struct crypto_tfm *tfm, const u8 *src,
unsigned int slen, u8 *dst, unsigned int *dlen)
{
@@ -94,18 +90,6 @@ static int skcipher_null_crypt(struct blkcipher_desc *desc,
return err;
}
-static struct crypto_alg compress_null = {
- .cra_name = "compress_null",
- .cra_flags = CRYPTO_ALG_TYPE_COMPRESS,
- .cra_blocksize = NULL_BLOCK_SIZE,
- .cra_ctxsize = 0,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(compress_null.cra_list),
- .cra_u = { .compress = {
- .coa_compress = null_compress,
- .coa_decompress = null_compress } }
-};
-
static struct shash_alg digest_null = {
.digestsize = NULL_DIGEST_SIZE,
.setkey = null_hash_setkey,
@@ -122,22 +106,19 @@ static struct shash_alg digest_null = {
}
};
-static struct crypto_alg cipher_null = {
+static struct crypto_alg null_algs[3] = { {
.cra_name = "cipher_null",
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
.cra_blocksize = NULL_BLOCK_SIZE,
.cra_ctxsize = 0,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(cipher_null.cra_list),
.cra_u = { .cipher = {
.cia_min_keysize = NULL_KEY_SIZE,
.cia_max_keysize = NULL_KEY_SIZE,
.cia_setkey = null_setkey,
.cia_encrypt = null_crypt,
.cia_decrypt = null_crypt } }
-};
-
-static struct crypto_alg skcipher_null = {
+}, {
.cra_name = "ecb(cipher_null)",
.cra_driver_name = "ecb-cipher_null",
.cra_priority = 100,
@@ -146,7 +127,6 @@ static struct crypto_alg skcipher_null = {
.cra_type = &crypto_blkcipher_type,
.cra_ctxsize = 0,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(skcipher_null.cra_list),
.cra_u = { .blkcipher = {
.min_keysize = NULL_KEY_SIZE,
.max_keysize = NULL_KEY_SIZE,
@@ -154,7 +134,16 @@ static struct crypto_alg skcipher_null = {
.setkey = null_setkey,
.encrypt = skcipher_null_crypt,
.decrypt = skcipher_null_crypt } }
-};
+}, {
+ .cra_name = "compress_null",
+ .cra_flags = CRYPTO_ALG_TYPE_COMPRESS,
+ .cra_blocksize = NULL_BLOCK_SIZE,
+ .cra_ctxsize = 0,
+ .cra_module = THIS_MODULE,
+ .cra_u = { .compress = {
+ .coa_compress = null_compress,
+ .coa_decompress = null_compress } }
+} };
MODULE_ALIAS("compress_null");
MODULE_ALIAS("digest_null");
@@ -163,41 +152,27 @@ MODULE_ALIAS("cipher_null");
static int __init crypto_null_mod_init(void)
{
int ret = 0;
-
- ret = crypto_register_alg(&cipher_null);
- if (ret < 0)
- goto out;
- ret = crypto_register_alg(&skcipher_null);
+ ret = crypto_register_algs(null_algs, ARRAY_SIZE(null_algs));
if (ret < 0)
- goto out_unregister_cipher;
+ goto out;
ret = crypto_register_shash(&digest_null);
if (ret < 0)
- goto out_unregister_skcipher;
+ goto out_unregister_algs;
- ret = crypto_register_alg(&compress_null);
- if (ret < 0)
- goto out_unregister_digest;
+ return 0;
-out:
+out_unregister_algs:
+ crypto_unregister_algs(null_algs, ARRAY_SIZE(null_algs));
+out:
return ret;
-
-out_unregister_digest:
- crypto_unregister_shash(&digest_null);
-out_unregister_skcipher:
- crypto_unregister_alg(&skcipher_null);
-out_unregister_cipher:
- crypto_unregister_alg(&cipher_null);
- goto out;
}
static void __exit crypto_null_mod_fini(void)
{
- crypto_unregister_alg(&compress_null);
crypto_unregister_shash(&digest_null);
- crypto_unregister_alg(&skcipher_null);
- crypto_unregister_alg(&cipher_null);
+ crypto_unregister_algs(null_algs, ARRAY_SIZE(null_algs));
}
module_init(crypto_null_mod_init);
diff --git a/crypto/crypto_user.c b/crypto/crypto_user.c
new file mode 100644
index 00000000000..e2a34feec7a
--- /dev/null
+++ b/crypto/crypto_user.c
@@ -0,0 +1,539 @@
+/*
+ * Crypto user configuration API.
+ *
+ * Copyright (C) 2011 secunet Security Networks AG
+ * Copyright (C) 2011 Steffen Klassert <steffen.klassert@secunet.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ */
+
+#include <linux/module.h>
+#include <linux/crypto.h>
+#include <linux/cryptouser.h>
+#include <linux/sched.h>
+#include <net/netlink.h>
+#include <linux/security.h>
+#include <net/net_namespace.h>
+#include <crypto/internal/aead.h>
+#include <crypto/internal/skcipher.h>
+
+#include "internal.h"
+
+#define null_terminated(x) (strnlen(x, sizeof(x)) < sizeof(x))
+
+static DEFINE_MUTEX(crypto_cfg_mutex);
+
+/* The crypto netlink socket */
+static struct sock *crypto_nlsk;
+
+struct crypto_dump_info {
+ struct sk_buff *in_skb;
+ struct sk_buff *out_skb;
+ u32 nlmsg_seq;
+ u16 nlmsg_flags;
+};
+
+static struct crypto_alg *crypto_alg_match(struct crypto_user_alg *p, int exact)
+{
+ struct crypto_alg *q, *alg = NULL;
+
+ down_read(&crypto_alg_sem);
+
+ list_for_each_entry(q, &crypto_alg_list, cra_list) {
+ int match = 0;
+
+ if ((q->cra_flags ^ p->cru_type) & p->cru_mask)
+ continue;
+
+ if (strlen(p->cru_driver_name))
+ match = !strcmp(q->cra_driver_name,
+ p->cru_driver_name);
+ else if (!exact)
+ match = !strcmp(q->cra_name, p->cru_name);
+
+ if (match) {
+ alg = q;
+ break;
+ }
+ }
+
+ up_read(&crypto_alg_sem);
+
+ return alg;
+}
+
+static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_cipher rcipher;
+
+ strncpy(rcipher.type, "cipher", sizeof(rcipher.type));
+
+ rcipher.blocksize = alg->cra_blocksize;
+ rcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
+ rcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
+
+ if (nla_put(skb, CRYPTOCFGA_REPORT_CIPHER,
+ sizeof(struct crypto_report_cipher), &rcipher))
+ goto nla_put_failure;
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+
+static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_comp rcomp;
+
+ strncpy(rcomp.type, "compression", sizeof(rcomp.type));
+ if (nla_put(skb, CRYPTOCFGA_REPORT_COMPRESS,
+ sizeof(struct crypto_report_comp), &rcomp))
+ goto nla_put_failure;
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+
+static int crypto_report_one(struct crypto_alg *alg,
+ struct crypto_user_alg *ualg, struct sk_buff *skb)
+{
+ strncpy(ualg->cru_name, alg->cra_name, sizeof(ualg->cru_name));
+ strncpy(ualg->cru_driver_name, alg->cra_driver_name,
+ sizeof(ualg->cru_driver_name));
+ strncpy(ualg->cru_module_name, module_name(alg->cra_module),
+ sizeof(ualg->cru_module_name));
+
+ ualg->cru_type = 0;
+ ualg->cru_mask = 0;
+ ualg->cru_flags = alg->cra_flags;
+ ualg->cru_refcnt = atomic_read(&alg->cra_refcnt);
+
+ if (nla_put_u32(skb, CRYPTOCFGA_PRIORITY_VAL, alg->cra_priority))
+ goto nla_put_failure;
+ if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
+ struct crypto_report_larval rl;
+
+ strncpy(rl.type, "larval", sizeof(rl.type));
+ if (nla_put(skb, CRYPTOCFGA_REPORT_LARVAL,
+ sizeof(struct crypto_report_larval), &rl))
+ goto nla_put_failure;
+ goto out;
+ }
+
+ if (alg->cra_type && alg->cra_type->report) {
+ if (alg->cra_type->report(skb, alg))
+ goto nla_put_failure;
+
+ goto out;
+ }
+
+ switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
+ case CRYPTO_ALG_TYPE_CIPHER:
+ if (crypto_report_cipher(skb, alg))
+ goto nla_put_failure;
+
+ break;
+ case CRYPTO_ALG_TYPE_COMPRESS:
+ if (crypto_report_comp(skb, alg))
+ goto nla_put_failure;
+
+ break;
+ }
+
+out:
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+
+static int crypto_report_alg(struct crypto_alg *alg,
+ struct crypto_dump_info *info)
+{
+ struct sk_buff *in_skb = info->in_skb;
+ struct sk_buff *skb = info->out_skb;
+ struct nlmsghdr *nlh;
+ struct crypto_user_alg *ualg;
+ int err = 0;
+
+ nlh = nlmsg_put(skb, NETLINK_CB(in_skb).portid, info->nlmsg_seq,
+ CRYPTO_MSG_GETALG, sizeof(*ualg), info->nlmsg_flags);
+ if (!nlh) {
+ err = -EMSGSIZE;
+ goto out;
+ }
+
+ ualg = nlmsg_data(nlh);
+
+ err = crypto_report_one(alg, ualg, skb);
+ if (err) {
+ nlmsg_cancel(skb, nlh);
+ goto out;
+ }
+
+ nlmsg_end(skb, nlh);
+
+out:
+ return err;
+}
+
+static int crypto_report(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
+ struct nlattr **attrs)
+{
+ struct crypto_user_alg *p = nlmsg_data(in_nlh);
+ struct crypto_alg *alg;
+ struct sk_buff *skb;
+ struct crypto_dump_info info;
+ int err;
+
+ if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
+ return -EINVAL;
+
+ if (!p->cru_driver_name[0])
+ return -EINVAL;
+
+ alg = crypto_alg_match(p, 1);
+ if (!alg)
+ return -ENOENT;
+
+ skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_ATOMIC);
+ if (!skb)
+ return -ENOMEM;
+
+ info.in_skb = in_skb;
+ info.out_skb = skb;
+ info.nlmsg_seq = in_nlh->nlmsg_seq;
+ info.nlmsg_flags = 0;
+
+ err = crypto_report_alg(alg, &info);
+ if (err)
+ return err;
+
+ return nlmsg_unicast(crypto_nlsk, skb, NETLINK_CB(in_skb).portid);
+}
+
+static int crypto_dump_report(struct sk_buff *skb, struct netlink_callback *cb)
+{
+ struct crypto_alg *alg;
+ struct crypto_dump_info info;
+ int err;
+
+ if (cb->args[0])
+ goto out;
+
+ cb->args[0] = 1;
+
+ info.in_skb = cb->skb;
+ info.out_skb = skb;
+ info.nlmsg_seq = cb->nlh->nlmsg_seq;
+ info.nlmsg_flags = NLM_F_MULTI;
+
+ list_for_each_entry(alg, &crypto_alg_list, cra_list) {
+ err = crypto_report_alg(alg, &info);
+ if (err)
+ goto out_err;
+ }
+
+out:
+ return skb->len;
+out_err:
+ return err;
+}
+
+static int crypto_dump_report_done(struct netlink_callback *cb)
+{
+ return 0;
+}
+
+static int crypto_update_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
+ struct nlattr **attrs)
+{
+ struct crypto_alg *alg;
+ struct crypto_user_alg *p = nlmsg_data(nlh);
+ struct nlattr *priority = attrs[CRYPTOCFGA_PRIORITY_VAL];
+ LIST_HEAD(list);
+
+ if (!netlink_capable(skb, CAP_NET_ADMIN))
+ return -EPERM;
+
+ if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
+ return -EINVAL;
+
+ if (priority && !strlen(p->cru_driver_name))
+ return -EINVAL;
+
+ alg = crypto_alg_match(p, 1);
+ if (!alg)
+ return -ENOENT;
+
+ down_write(&crypto_alg_sem);
+
+ crypto_remove_spawns(alg, &list, NULL);
+
+ if (priority)
+ alg->cra_priority = nla_get_u32(priority);
+
+ up_write(&crypto_alg_sem);
+
+ crypto_remove_final(&list);
+
+ return 0;
+}
+
+static int crypto_del_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
+ struct nlattr **attrs)
+{
+ struct crypto_alg *alg;
+ struct crypto_user_alg *p = nlmsg_data(nlh);
+
+ if (!netlink_capable(skb, CAP_NET_ADMIN))
+ return -EPERM;
+
+ if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
+ return -EINVAL;
+
+ alg = crypto_alg_match(p, 1);
+ if (!alg)
+ return -ENOENT;
+
+ /* We cannot unregister core algorithms such as aes-generic.
+ * We would lose the reference in the crypto_alg_list to this algorithm
+ * if we tried to unregister. Unregistering such an algorithm without
+ * removing the module is not possible, so we restrict this to crypto
+ * instances that are built from templates. */
+ if (!(alg->cra_flags & CRYPTO_ALG_INSTANCE))
+ return -EINVAL;
+
+ if (atomic_read(&alg->cra_refcnt) != 1)
+ return -EBUSY;
+
+ return crypto_unregister_instance(alg);
+}
+
+static struct crypto_alg *crypto_user_skcipher_alg(const char *name, u32 type,
+ u32 mask)
+{
+ int err;
+ struct crypto_alg *alg;
+
+ type = crypto_skcipher_type(type);
+ mask = crypto_skcipher_mask(mask);
+
+ for (;;) {
+ alg = crypto_lookup_skcipher(name, type, mask);
+ if (!IS_ERR(alg))
+ return alg;
+
+ err = PTR_ERR(alg);
+ if (err != -EAGAIN)
+ break;
+ if (signal_pending(current)) {
+ err = -EINTR;
+ break;
+ }
+ }
+
+ return ERR_PTR(err);
+}
+
+static struct crypto_alg *crypto_user_aead_alg(const char *name, u32 type,
+ u32 mask)
+{
+ int err;
+ struct crypto_alg *alg;
+
+ type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
+ type |= CRYPTO_ALG_TYPE_AEAD;
+ mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
+ mask |= CRYPTO_ALG_TYPE_MASK;
+
+ for (;;) {
+ alg = crypto_lookup_aead(name, type, mask);
+ if (!IS_ERR(alg))
+ return alg;
+
+ err = PTR_ERR(alg);
+ if (err != -EAGAIN)
+ break;
+ if (signal_pending(current)) {
+ err = -EINTR;
+ break;
+ }
+ }
+
+ return ERR_PTR(err);
+}
+
+static int crypto_add_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
+ struct nlattr **attrs)
+{
+ int exact = 0;
+ const char *name;
+ struct crypto_alg *alg;
+ struct crypto_user_alg *p = nlmsg_data(nlh);
+ struct nlattr *priority = attrs[CRYPTOCFGA_PRIORITY_VAL];
+
+ if (!netlink_capable(skb, CAP_NET_ADMIN))
+ return -EPERM;
+
+ if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
+ return -EINVAL;
+
+ if (strlen(p->cru_driver_name))
+ exact = 1;
+
+ if (priority && !exact)
+ return -EINVAL;
+
+ alg = crypto_alg_match(p, exact);
+ if (alg)
+ return -EEXIST;
+
+ if (strlen(p->cru_driver_name))
+ name = p->cru_driver_name;
+ else
+ name = p->cru_name;
+
+ switch (p->cru_type & p->cru_mask & CRYPTO_ALG_TYPE_MASK) {
+ case CRYPTO_ALG_TYPE_AEAD:
+ alg = crypto_user_aead_alg(name, p->cru_type, p->cru_mask);
+ break;
+ case CRYPTO_ALG_TYPE_GIVCIPHER:
+ case CRYPTO_ALG_TYPE_BLKCIPHER:
+ case CRYPTO_ALG_TYPE_ABLKCIPHER:
+ alg = crypto_user_skcipher_alg(name, p->cru_type, p->cru_mask);
+ break;
+ default:
+ alg = crypto_alg_mod_lookup(name, p->cru_type, p->cru_mask);
+ }
+
+ if (IS_ERR(alg))
+ return PTR_ERR(alg);
+
+ down_write(&crypto_alg_sem);
+
+ if (priority)
+ alg->cra_priority = nla_get_u32(priority);
+
+ up_write(&crypto_alg_sem);
+
+ crypto_mod_put(alg);
+
+ return 0;
+}
+
+#define MSGSIZE(type) sizeof(struct type)
+
+static const int crypto_msg_min[CRYPTO_NR_MSGTYPES] = {
+ [CRYPTO_MSG_NEWALG - CRYPTO_MSG_BASE] = MSGSIZE(crypto_user_alg),
+ [CRYPTO_MSG_DELALG - CRYPTO_MSG_BASE] = MSGSIZE(crypto_user_alg),
+ [CRYPTO_MSG_UPDATEALG - CRYPTO_MSG_BASE] = MSGSIZE(crypto_user_alg),
+ [CRYPTO_MSG_GETALG - CRYPTO_MSG_BASE] = MSGSIZE(crypto_user_alg),
+};
+
+static const struct nla_policy crypto_policy[CRYPTOCFGA_MAX+1] = {
+ [CRYPTOCFGA_PRIORITY_VAL] = { .type = NLA_U32},
+};
+
+#undef MSGSIZE
+
+static const struct crypto_link {
+ int (*doit)(struct sk_buff *, struct nlmsghdr *, struct nlattr **);
+ int (*dump)(struct sk_buff *, struct netlink_callback *);
+ int (*done)(struct netlink_callback *);
+} crypto_dispatch[CRYPTO_NR_MSGTYPES] = {
+ [CRYPTO_MSG_NEWALG - CRYPTO_MSG_BASE] = { .doit = crypto_add_alg},
+ [CRYPTO_MSG_DELALG - CRYPTO_MSG_BASE] = { .doit = crypto_del_alg},
+ [CRYPTO_MSG_UPDATEALG - CRYPTO_MSG_BASE] = { .doit = crypto_update_alg},
+ [CRYPTO_MSG_GETALG - CRYPTO_MSG_BASE] = { .doit = crypto_report,
+ .dump = crypto_dump_report,
+ .done = crypto_dump_report_done},
+};
+
+static int crypto_user_rcv_msg(struct sk_buff *skb, struct nlmsghdr *nlh)
+{
+ struct nlattr *attrs[CRYPTOCFGA_MAX+1];
+ const struct crypto_link *link;
+ int type, err;
+
+ type = nlh->nlmsg_type;
+ if (type > CRYPTO_MSG_MAX)
+ return -EINVAL;
+
+ type -= CRYPTO_MSG_BASE;
+ link = &crypto_dispatch[type];
+
+ if ((type == (CRYPTO_MSG_GETALG - CRYPTO_MSG_BASE) &&
+ (nlh->nlmsg_flags & NLM_F_DUMP))) {
+ struct crypto_alg *alg;
+ u16 dump_alloc = 0;
+
+ if (link->dump == NULL)
+ return -EINVAL;
+
+ list_for_each_entry(alg, &crypto_alg_list, cra_list)
+ dump_alloc += CRYPTO_REPORT_MAXSIZE;
+
+ {
+ struct netlink_dump_control c = {
+ .dump = link->dump,
+ .done = link->done,
+ .min_dump_alloc = dump_alloc,
+ };
+ return netlink_dump_start(crypto_nlsk, skb, nlh, &c);
+ }
+ }
+
+ err = nlmsg_parse(nlh, crypto_msg_min[type], attrs, CRYPTOCFGA_MAX,
+ crypto_policy);
+ if (err < 0)
+ return err;
+
+ if (link->doit == NULL)
+ return -EINVAL;
+
+ return link->doit(skb, nlh, attrs);
+}
+
+static void crypto_netlink_rcv(struct sk_buff *skb)
+{
+ mutex_lock(&crypto_cfg_mutex);
+ netlink_rcv_skb(skb, &crypto_user_rcv_msg);
+ mutex_unlock(&crypto_cfg_mutex);
+}
+
+static int __init crypto_user_init(void)
+{
+ struct netlink_kernel_cfg cfg = {
+ .input = crypto_netlink_rcv,
+ };
+
+ crypto_nlsk = netlink_kernel_create(&init_net, NETLINK_CRYPTO, &cfg);
+ if (!crypto_nlsk)
+ return -ENOMEM;
+
+ return 0;
+}
+
+static void __exit crypto_user_exit(void)
+{
+ netlink_kernel_release(crypto_nlsk);
+}
+
+module_init(crypto_user_init);
+module_exit(crypto_user_exit);
+MODULE_LICENSE("GPL");
+MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
+MODULE_DESCRIPTION("Crypto userspace configuration API");
diff --git a/crypto/crypto_wq.c b/crypto/crypto_wq.c
new file mode 100644
index 00000000000..2f1b8d12952
--- /dev/null
+++ b/crypto/crypto_wq.c
@@ -0,0 +1,40 @@
+/*
+ * Workqueue for crypto subsystem
+ *
+ * Copyright (c) 2009 Intel Corp.
+ * Author: Huang Ying <ying.huang@intel.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <linux/workqueue.h>
+#include <linux/module.h>
+#include <crypto/algapi.h>
+#include <crypto/crypto_wq.h>
+
+struct workqueue_struct *kcrypto_wq;
+EXPORT_SYMBOL_GPL(kcrypto_wq);
+
+static int __init crypto_wq_init(void)
+{
+ kcrypto_wq = alloc_workqueue("crypto",
+ WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE, 1);
+ if (unlikely(!kcrypto_wq))
+ return -ENOMEM;
+ return 0;
+}
+
+static void __exit crypto_wq_exit(void)
+{
+ destroy_workqueue(kcrypto_wq);
+}
+
+subsys_initcall(crypto_wq_init);
+module_exit(crypto_wq_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Workqueue for crypto subsystem");
diff --git a/crypto/ctr.c b/crypto/ctr.c
index 2d7425f0e7b..f2b94f27bb2 100644
--- a/crypto/ctr.c
+++ b/crypto/ctr.c
@@ -12,6 +12,7 @@
#include <crypto/algapi.h>
#include <crypto/ctr.h>
+#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
@@ -25,10 +26,15 @@ struct crypto_ctr_ctx {
};
struct crypto_rfc3686_ctx {
- struct crypto_blkcipher *child;
+ struct crypto_ablkcipher *child;
u8 nonce[CTR_RFC3686_NONCE_SIZE];
};
+struct crypto_rfc3686_req_ctx {
+ u8 iv[CTR_RFC3686_BLOCK_SIZE];
+ struct ablkcipher_request subreq CRYPTO_MINALIGN_ATTR;
+};
+
static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
unsigned int keylen)
{
@@ -185,7 +191,7 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
CRYPTO_ALG_TYPE_MASK);
if (IS_ERR(alg))
- return ERR_PTR(PTR_ERR(alg));
+ return ERR_CAST(alg);
/* Block size must be >= 4 bytes. */
err = -EINVAL;
@@ -219,6 +225,8 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;
+ inst->alg.cra_blkcipher.geniv = "chainiv";
+
out:
crypto_mod_put(alg);
return inst;
@@ -241,11 +249,11 @@ static struct crypto_template crypto_ctr_tmpl = {
.module = THIS_MODULE,
};
-static int crypto_rfc3686_setkey(struct crypto_tfm *parent, const u8 *key,
- unsigned int keylen)
+static int crypto_rfc3686_setkey(struct crypto_ablkcipher *parent,
+ const u8 *key, unsigned int keylen)
{
- struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(parent);
- struct crypto_blkcipher *child = ctx->child;
+ struct crypto_rfc3686_ctx *ctx = crypto_ablkcipher_ctx(parent);
+ struct crypto_ablkcipher *child = ctx->child;
int err;
/* the nonce is stored in bytes at end of key */
@@ -257,59 +265,64 @@ static int crypto_rfc3686_setkey(struct crypto_tfm *parent, const u8 *key,
keylen -= CTR_RFC3686_NONCE_SIZE;
- crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
- crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) &
- CRYPTO_TFM_REQ_MASK);
- err = crypto_blkcipher_setkey(child, key, keylen);
- crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
+ crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+ crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
+ CRYPTO_TFM_REQ_MASK);
+ err = crypto_ablkcipher_setkey(child, key, keylen);
+ crypto_ablkcipher_set_flags(parent, crypto_ablkcipher_get_flags(child) &
+ CRYPTO_TFM_RES_MASK);
return err;
}
-static int crypto_rfc3686_crypt(struct blkcipher_desc *desc,
- struct scatterlist *dst,
- struct scatterlist *src, unsigned int nbytes)
+static int crypto_rfc3686_crypt(struct ablkcipher_request *req)
{
- struct crypto_blkcipher *tfm = desc->tfm;
- struct crypto_rfc3686_ctx *ctx = crypto_blkcipher_ctx(tfm);
- struct crypto_blkcipher *child = ctx->child;
- unsigned long alignmask = crypto_blkcipher_alignmask(tfm);
- u8 ivblk[CTR_RFC3686_BLOCK_SIZE + alignmask];
- u8 *iv = PTR_ALIGN(ivblk + 0, alignmask + 1);
- u8 *info = desc->info;
- int err;
+ struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
+ struct crypto_rfc3686_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+ struct crypto_ablkcipher *child = ctx->child;
+ unsigned long align = crypto_ablkcipher_alignmask(tfm);
+ struct crypto_rfc3686_req_ctx *rctx =
+ (void *)PTR_ALIGN((u8 *)ablkcipher_request_ctx(req), align + 1);
+ struct ablkcipher_request *subreq = &rctx->subreq;
+ u8 *iv = rctx->iv;
/* set up counter block */
memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
- memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);
+ memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->info, CTR_RFC3686_IV_SIZE);
/* initialize counter portion of counter block */
*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
cpu_to_be32(1);
- desc->tfm = child;
- desc->info = iv;
- err = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
- desc->tfm = tfm;
- desc->info = info;
+ ablkcipher_request_set_tfm(subreq, child);
+ ablkcipher_request_set_callback(subreq, req->base.flags,
+ req->base.complete, req->base.data);
+ ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->nbytes,
+ iv);
- return err;
+ return crypto_ablkcipher_encrypt(subreq);
}
static int crypto_rfc3686_init_tfm(struct crypto_tfm *tfm)
{
struct crypto_instance *inst = (void *)tfm->__crt_alg;
- struct crypto_spawn *spawn = crypto_instance_ctx(inst);
+ struct crypto_skcipher_spawn *spawn = crypto_instance_ctx(inst);
struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
- struct crypto_blkcipher *cipher;
+ struct crypto_ablkcipher *cipher;
+ unsigned long align;
- cipher = crypto_spawn_blkcipher(spawn);
+ cipher = crypto_spawn_skcipher(spawn);
if (IS_ERR(cipher))
return PTR_ERR(cipher);
ctx->child = cipher;
+ align = crypto_tfm_alg_alignmask(tfm);
+ align &= ~(crypto_tfm_ctx_alignment() - 1);
+ tfm->crt_ablkcipher.reqsize = align +
+ sizeof(struct crypto_rfc3686_req_ctx) +
+ crypto_ablkcipher_reqsize(cipher);
+
return 0;
}
@@ -317,74 +330,108 @@ static void crypto_rfc3686_exit_tfm(struct crypto_tfm *tfm)
{
struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
- crypto_free_blkcipher(ctx->child);
+ crypto_free_ablkcipher(ctx->child);
}
static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb)
{
+ struct crypto_attr_type *algt;
struct crypto_instance *inst;
struct crypto_alg *alg;
+ struct crypto_skcipher_spawn *spawn;
+ const char *cipher_name;
int err;
- err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
+ algt = crypto_get_attr_type(tb);
+ if (IS_ERR(algt))
+ return ERR_CAST(algt);
+
+ if ((algt->type ^ CRYPTO_ALG_TYPE_BLKCIPHER) & algt->mask)
+ return ERR_PTR(-EINVAL);
+
+ cipher_name = crypto_attr_alg_name(tb[1]);
+ if (IS_ERR(cipher_name))
+ return ERR_CAST(cipher_name);
+
+ inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+ if (!inst)
+ return ERR_PTR(-ENOMEM);
+
+ spawn = crypto_instance_ctx(inst);
+
+ crypto_set_skcipher_spawn(spawn, inst);
+ err = crypto_grab_skcipher(spawn, cipher_name, 0,
+ crypto_requires_sync(algt->type,
+ algt->mask));
if (err)
- return ERR_PTR(err);
+ goto err_free_inst;
- alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER,
- CRYPTO_ALG_TYPE_MASK);
- err = PTR_ERR(alg);
- if (IS_ERR(alg))
- return ERR_PTR(err);
+ alg = crypto_skcipher_spawn_alg(spawn);
/* We only support 16-byte blocks. */
err = -EINVAL;
- if (alg->cra_blkcipher.ivsize != CTR_RFC3686_BLOCK_SIZE)
- goto out_put_alg;
+ if (alg->cra_ablkcipher.ivsize != CTR_RFC3686_BLOCK_SIZE)
+ goto err_drop_spawn;
/* Not a stream cipher? */
if (alg->cra_blocksize != 1)
- goto out_put_alg;
+ goto err_drop_spawn;
- inst = crypto_alloc_instance("rfc3686", alg);
- if (IS_ERR(inst))
- goto out;
+ err = -ENAMETOOLONG;
+ if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "rfc3686(%s)",
+ alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
+ goto err_drop_spawn;
+ if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+ "rfc3686(%s)", alg->cra_driver_name) >=
+ CRYPTO_MAX_ALG_NAME)
+ goto err_drop_spawn;
- inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
inst->alg.cra_priority = alg->cra_priority;
inst->alg.cra_blocksize = 1;
inst->alg.cra_alignmask = alg->cra_alignmask;
- inst->alg.cra_type = &crypto_blkcipher_type;
- inst->alg.cra_blkcipher.ivsize = CTR_RFC3686_IV_SIZE;
- inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize
- + CTR_RFC3686_NONCE_SIZE;
- inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize
- + CTR_RFC3686_NONCE_SIZE;
+ inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+ (alg->cra_flags & CRYPTO_ALG_ASYNC);
+ inst->alg.cra_type = &crypto_ablkcipher_type;
+
+ inst->alg.cra_ablkcipher.ivsize = CTR_RFC3686_IV_SIZE;
+ inst->alg.cra_ablkcipher.min_keysize =
+ alg->cra_ablkcipher.min_keysize + CTR_RFC3686_NONCE_SIZE;
+ inst->alg.cra_ablkcipher.max_keysize =
+ alg->cra_ablkcipher.max_keysize + CTR_RFC3686_NONCE_SIZE;
- inst->alg.cra_blkcipher.geniv = "seqiv";
+ inst->alg.cra_ablkcipher.geniv = "seqiv";
+
+ inst->alg.cra_ablkcipher.setkey = crypto_rfc3686_setkey;
+ inst->alg.cra_ablkcipher.encrypt = crypto_rfc3686_crypt;
+ inst->alg.cra_ablkcipher.decrypt = crypto_rfc3686_crypt;
inst->alg.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);
inst->alg.cra_init = crypto_rfc3686_init_tfm;
inst->alg.cra_exit = crypto_rfc3686_exit_tfm;
- inst->alg.cra_blkcipher.setkey = crypto_rfc3686_setkey;
- inst->alg.cra_blkcipher.encrypt = crypto_rfc3686_crypt;
- inst->alg.cra_blkcipher.decrypt = crypto_rfc3686_crypt;
-
-out:
- crypto_mod_put(alg);
return inst;
-out_put_alg:
- inst = ERR_PTR(err);
- goto out;
+err_drop_spawn:
+ crypto_drop_skcipher(spawn);
+err_free_inst:
+ kfree(inst);
+ return ERR_PTR(err);
+}
+
+static void crypto_rfc3686_free(struct crypto_instance *inst)
+{
+ struct crypto_skcipher_spawn *spawn = crypto_instance_ctx(inst);
+
+ crypto_drop_skcipher(spawn);
+ kfree(inst);
}
static struct crypto_template crypto_rfc3686_tmpl = {
.name = "rfc3686",
.alloc = crypto_rfc3686_alloc,
- .free = crypto_ctr_free,
+ .free = crypto_rfc3686_free,
.module = THIS_MODULE,
};
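Since rfc3686 is now instantiated as an ablkcipher, callers move from the synchronous blkcipher descriptor to an ablkcipher_request. A hedged sketch follows; the key, nonce, IV and scatterlists are placeholders, and CRYPTO_ALG_ASYNC is masked so a synchronous instance is selected and no completion callback is needed.

#include <crypto/ctr.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int example_rfc3686(struct scatterlist *src, struct scatterlist *dst,
			   unsigned int nbytes)
{
	/* 16-byte AES key with the 4-byte nonce appended, per RFC 3686 */
	u8 key[16 + CTR_RFC3686_NONCE_SIZE] = { 0 };	/* placeholder */
	u8 iv[CTR_RFC3686_IV_SIZE] = { 0 };		/* per-packet IV */
	struct crypto_ablkcipher *tfm;
	struct ablkcipher_request *req;
	int err;

	/* mask CRYPTO_ALG_ASYNC to get a synchronous implementation */
	tfm = crypto_alloc_ablkcipher("rfc3686(ctr(aes))", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ablkcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out_free_tfm;

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
					NULL, NULL);
	ablkcipher_request_set_crypt(req, src, dst, nbytes, iv);
	err = crypto_ablkcipher_encrypt(req);

	ablkcipher_request_free(req);
out_free_tfm:
	crypto_free_ablkcipher(tfm);
	return err;
}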
diff --git a/crypto/cts.c b/crypto/cts.c
index ccf9c5de395..042223f8e73 100644
--- a/crypto/cts.c
+++ b/crypto/cts.c
@@ -282,9 +282,8 @@ static struct crypto_instance *crypto_cts_alloc(struct rtattr **tb)
alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER,
CRYPTO_ALG_TYPE_MASK);
- err = PTR_ERR(alg);
if (IS_ERR(alg))
- return ERR_PTR(err);
+ return ERR_CAST(alg);
inst = ERR_PTR(-EINVAL);
if (!is_power_of_2(alg->cra_blocksize))
diff --git a/crypto/deflate.c b/crypto/deflate.c
index 9128da44e95..b57d70eb156 100644
--- a/crypto/deflate.c
+++ b/crypto/deflate.c
@@ -1,14 +1,14 @@
-/*
+/*
* Cryptographic API.
*
* Deflate algorithm (RFC 1951), implemented here primarily for use
* by IPCOMP (RFC 3173 & RFC 2394).
*
* Copyright (c) 2003 James Morris <jmorris@intercode.com.au>
- *
+ *
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
+ * Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* FIXME: deflate transforms will require up to a total of about 436k of kernel
@@ -32,7 +32,6 @@
#include <linux/interrupt.h>
#include <linux/mm.h>
#include <linux/net.h>
-#include <linux/slab.h>
#define DEFLATE_DEF_LEVEL Z_DEFAULT_COMPRESSION
#define DEFLATE_DEF_WINBITS 11
@@ -48,12 +47,12 @@ static int deflate_comp_init(struct deflate_ctx *ctx)
int ret = 0;
struct z_stream_s *stream = &ctx->comp_stream;
- stream->workspace = vmalloc(zlib_deflate_workspacesize());
- if (!stream->workspace ) {
+ stream->workspace = vzalloc(zlib_deflate_workspacesize(
+ -DEFLATE_DEF_WINBITS, DEFLATE_DEF_MEMLEVEL));
+ if (!stream->workspace) {
ret = -ENOMEM;
goto out;
}
- memset(stream->workspace, 0, zlib_deflate_workspacesize());
ret = zlib_deflateInit2(stream, DEFLATE_DEF_LEVEL, Z_DEFLATED,
-DEFLATE_DEF_WINBITS, DEFLATE_DEF_MEMLEVEL,
Z_DEFAULT_STRATEGY);
@@ -61,7 +60,7 @@ static int deflate_comp_init(struct deflate_ctx *ctx)
ret = -EINVAL;
goto out_free;
}
-out:
+out:
return ret;
out_free:
vfree(stream->workspace);
@@ -73,8 +72,8 @@ static int deflate_decomp_init(struct deflate_ctx *ctx)
int ret = 0;
struct z_stream_s *stream = &ctx->decomp_stream;
- stream->workspace = kzalloc(zlib_inflate_workspacesize(), GFP_KERNEL);
- if (!stream->workspace ) {
+ stream->workspace = vzalloc(zlib_inflate_workspacesize());
+ if (!stream->workspace) {
ret = -ENOMEM;
goto out;
}
@@ -86,7 +85,7 @@ static int deflate_decomp_init(struct deflate_ctx *ctx)
out:
return ret;
out_free:
- kfree(stream->workspace);
+ vfree(stream->workspace);
goto out;
}
@@ -99,14 +98,14 @@ static void deflate_comp_exit(struct deflate_ctx *ctx)
static void deflate_decomp_exit(struct deflate_ctx *ctx)
{
zlib_inflateEnd(&ctx->decomp_stream);
- kfree(ctx->decomp_stream.workspace);
+ vfree(ctx->decomp_stream.workspace);
}
static int deflate_init(struct crypto_tfm *tfm)
{
struct deflate_ctx *ctx = crypto_tfm_ctx(tfm);
int ret;
-
+
ret = deflate_comp_init(ctx);
if (ret)
goto out;
@@ -153,11 +152,11 @@ static int deflate_compress(struct crypto_tfm *tfm, const u8 *src,
out:
return ret;
}
-
+
static int deflate_decompress(struct crypto_tfm *tfm, const u8 *src,
unsigned int slen, u8 *dst, unsigned int *dlen)
{
-
+
int ret = 0;
struct deflate_ctx *dctx = crypto_tfm_ctx(tfm);
struct z_stream_s *stream = &dctx->decomp_stream;
@@ -182,7 +181,7 @@ static int deflate_decompress(struct crypto_tfm *tfm, const u8 *src,
if (ret == Z_OK && !stream->avail_in && stream->avail_out) {
u8 zerostuff = 0;
stream->next_in = &zerostuff;
- stream->avail_in = 1;
+ stream->avail_in = 1;
ret = zlib_inflate(stream, Z_FINISH);
}
if (ret != Z_STREAM_END) {
@@ -200,7 +199,6 @@ static struct crypto_alg alg = {
.cra_flags = CRYPTO_ALG_TYPE_COMPRESS,
.cra_ctxsize = sizeof(struct deflate_ctx),
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(alg.cra_list),
.cra_init = deflate_init,
.cra_exit = deflate_exit,
.cra_u = { .compress = {
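Beyond the workspace-size arguments, the deflate hunks swap vmalloc()+memset() and a large kzalloc() for vzalloc(), which hands back zeroed, virtually contiguous memory and is released with vfree(). A userspace analogue of the same simplification, with calloc() standing in for vzalloc() (this only mirrors the allocate-then-zero collapse, not the kernel allocators themselves):

#include <stdlib.h>
#include <string.h>

int main(void)
{
	size_t size = 64 * 1024;	/* roughly the scale of a zlib workspace */

	/* before: allocate, then zero in a separate pass */
	unsigned char *ws_old = malloc(size);
	if (ws_old)
		memset(ws_old, 0, size);

	/* after: one call that returns already-zeroed memory, like vzalloc() */
	unsigned char *ws_new = calloc(1, size);

	free(ws_old);
	free(ws_new);
	return (ws_old && ws_new) ? 0 : 1;
}
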
diff --git a/crypto/des_generic.c b/crypto/des_generic.c
index 5bd3ee345a6..f6cf63f8846 100644
--- a/crypto/des_generic.c
+++ b/crypto/des_generic.c
@@ -614,7 +614,7 @@ static const u32 S8[64] = {
#define T3(x) pt[2 * (x) + 2]
#define T4(x) pt[2 * (x) + 3]
-#define PC2(a, b, c, d) (T4(d) | T3(c) | T2(b) | T1(a))
+#define DES_PC2(a, b, c, d) (T4(d) | T3(c) | T2(b) | T1(a))
/*
* Encryption key expansion
@@ -639,22 +639,22 @@ unsigned long des_ekey(u32 *pe, const u8 *k)
b = k[6]; b &= 0x0e; b <<= 4; b |= k[2] & 0x1e; b = pc1[b];
a = k[7]; a &= 0x0e; a <<= 4; a |= k[3] & 0x1e; a = pc1[a];
- pe[15 * 2 + 0] = PC2(a, b, c, d); d = rs[d];
- pe[14 * 2 + 0] = PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[13 * 2 + 0] = PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[12 * 2 + 0] = PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[11 * 2 + 0] = PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[10 * 2 + 0] = PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 9 * 2 + 0] = PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 8 * 2 + 0] = PC2(d, a, b, c); c = rs[c];
- pe[ 7 * 2 + 0] = PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 6 * 2 + 0] = PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[ 5 * 2 + 0] = PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 4 * 2 + 0] = PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[ 3 * 2 + 0] = PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 2 * 2 + 0] = PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[ 1 * 2 + 0] = PC2(c, d, a, b); b = rs[b];
- pe[ 0 * 2 + 0] = PC2(b, c, d, a);
+ pe[15 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d];
+ pe[14 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
+ pe[13 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
+ pe[12 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
+ pe[11 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
+ pe[10 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
+ pe[ 9 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
+ pe[ 8 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c];
+ pe[ 7 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
+ pe[ 6 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
+ pe[ 5 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
+ pe[ 4 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
+ pe[ 3 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
+ pe[ 2 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
+ pe[ 1 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b];
+ pe[ 0 * 2 + 0] = DES_PC2(b, c, d, a);
/* Check if first half is weak */
w = (a ^ c) | (b ^ d) | (rs[a] ^ c) | (b ^ rs[d]);
@@ -670,22 +670,22 @@ unsigned long des_ekey(u32 *pe, const u8 *k)
/* Check if second half is weak */
w |= (a ^ c) | (b ^ d) | (rs[a] ^ c) | (b ^ rs[d]);
- pe[15 * 2 + 1] = PC2(a, b, c, d); d = rs[d];
- pe[14 * 2 + 1] = PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[13 * 2 + 1] = PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[12 * 2 + 1] = PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[11 * 2 + 1] = PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[10 * 2 + 1] = PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 9 * 2 + 1] = PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 8 * 2 + 1] = PC2(d, a, b, c); c = rs[c];
- pe[ 7 * 2 + 1] = PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 6 * 2 + 1] = PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[ 5 * 2 + 1] = PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 4 * 2 + 1] = PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[ 3 * 2 + 1] = PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 2 * 2 + 1] = PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[ 1 * 2 + 1] = PC2(c, d, a, b); b = rs[b];
- pe[ 0 * 2 + 1] = PC2(b, c, d, a);
+ pe[15 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d];
+ pe[14 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
+ pe[13 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
+ pe[12 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
+ pe[11 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
+ pe[10 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
+ pe[ 9 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
+ pe[ 8 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c];
+ pe[ 7 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
+ pe[ 6 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
+ pe[ 5 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
+ pe[ 4 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
+ pe[ 3 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
+ pe[ 2 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
+ pe[ 1 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b];
+ pe[ 0 * 2 + 1] = DES_PC2(b, c, d, a);
/* Fixup: 2413 5768 -> 1357 2468 */
for (d = 0; d < 16; ++d) {
@@ -722,22 +722,22 @@ static void dkey(u32 *pe, const u8 *k)
b = k[6]; b &= 0x0e; b <<= 4; b |= k[2] & 0x1e; b = pc1[b];
a = k[7]; a &= 0x0e; a <<= 4; a |= k[3] & 0x1e; a = pc1[a];
- pe[ 0 * 2] = PC2(a, b, c, d); d = rs[d];
- pe[ 1 * 2] = PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 2 * 2] = PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 3 * 2] = PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 4 * 2] = PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 5 * 2] = PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 6 * 2] = PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 7 * 2] = PC2(d, a, b, c); c = rs[c];
- pe[ 8 * 2] = PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 9 * 2] = PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[10 * 2] = PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[11 * 2] = PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[12 * 2] = PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[13 * 2] = PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[14 * 2] = PC2(c, d, a, b); b = rs[b];
- pe[15 * 2] = PC2(b, c, d, a);
+ pe[ 0 * 2] = DES_PC2(a, b, c, d); d = rs[d];
+ pe[ 1 * 2] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
+ pe[ 2 * 2] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
+ pe[ 3 * 2] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
+ pe[ 4 * 2] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
+ pe[ 5 * 2] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
+ pe[ 6 * 2] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
+ pe[ 7 * 2] = DES_PC2(d, a, b, c); c = rs[c];
+ pe[ 8 * 2] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
+ pe[ 9 * 2] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
+ pe[10 * 2] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
+ pe[11 * 2] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
+ pe[12 * 2] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
+ pe[13 * 2] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
+ pe[14 * 2] = DES_PC2(c, d, a, b); b = rs[b];
+ pe[15 * 2] = DES_PC2(b, c, d, a);
/* Skip to next table set */
pt += 512;
@@ -747,22 +747,22 @@ static void dkey(u32 *pe, const u8 *k)
b = k[2]; b &= 0xe0; b >>= 4; b |= k[6] & 0xf0; b = pc1[b + 1];
a = k[3]; a &= 0xe0; a >>= 4; a |= k[7] & 0xf0; a = pc1[a + 1];
- pe[ 0 * 2 + 1] = PC2(a, b, c, d); d = rs[d];
- pe[ 1 * 2 + 1] = PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 2 * 2 + 1] = PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 3 * 2 + 1] = PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 4 * 2 + 1] = PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 5 * 2 + 1] = PC2(d, a, b, c); c = rs[c]; b = rs[b];
- pe[ 6 * 2 + 1] = PC2(b, c, d, a); a = rs[a]; d = rs[d];
- pe[ 7 * 2 + 1] = PC2(d, a, b, c); c = rs[c];
- pe[ 8 * 2 + 1] = PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[ 9 * 2 + 1] = PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[10 * 2 + 1] = PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[11 * 2 + 1] = PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[12 * 2 + 1] = PC2(c, d, a, b); b = rs[b]; a = rs[a];
- pe[13 * 2 + 1] = PC2(a, b, c, d); d = rs[d]; c = rs[c];
- pe[14 * 2 + 1] = PC2(c, d, a, b); b = rs[b];
- pe[15 * 2 + 1] = PC2(b, c, d, a);
+ pe[ 0 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d];
+ pe[ 1 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
+ pe[ 2 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
+ pe[ 3 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
+ pe[ 4 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
+ pe[ 5 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
+ pe[ 6 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
+ pe[ 7 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c];
+ pe[ 8 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
+ pe[ 9 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
+ pe[10 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
+ pe[11 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
+ pe[12 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
+ pe[13 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
+ pe[14 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b];
+ pe[15 * 2 + 1] = DES_PC2(b, c, d, a);
/* Fixup: 2413 5768 -> 1357 2468 */
for (d = 0; d < 16; ++d) {
@@ -869,8 +869,7 @@ static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key,
if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
!((K[2] ^ K[4]) | (K[3] ^ K[5]))) &&
- (*flags & CRYPTO_TFM_REQ_WEAK_KEY))
- {
+ (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
*flags |= CRYPTO_TFM_RES_WEAK_KEY;
return -EINVAL;
}
@@ -944,59 +943,44 @@ static void des3_ede_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
d[1] = cpu_to_le32(L);
}
-static struct crypto_alg des_alg = {
+static struct crypto_alg des_algs[2] = { {
.cra_name = "des",
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
.cra_blocksize = DES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct des_ctx),
.cra_module = THIS_MODULE,
.cra_alignmask = 3,
- .cra_list = LIST_HEAD_INIT(des_alg.cra_list),
.cra_u = { .cipher = {
.cia_min_keysize = DES_KEY_SIZE,
.cia_max_keysize = DES_KEY_SIZE,
.cia_setkey = des_setkey,
.cia_encrypt = des_encrypt,
.cia_decrypt = des_decrypt } }
-};
-
-static struct crypto_alg des3_ede_alg = {
+}, {
.cra_name = "des3_ede",
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct des3_ede_ctx),
.cra_module = THIS_MODULE,
.cra_alignmask = 3,
- .cra_list = LIST_HEAD_INIT(des3_ede_alg.cra_list),
.cra_u = { .cipher = {
.cia_min_keysize = DES3_EDE_KEY_SIZE,
.cia_max_keysize = DES3_EDE_KEY_SIZE,
.cia_setkey = des3_ede_setkey,
.cia_encrypt = des3_ede_encrypt,
.cia_decrypt = des3_ede_decrypt } }
-};
+} };
MODULE_ALIAS("des3_ede");
static int __init des_generic_mod_init(void)
{
- int ret = 0;
-
- ret = crypto_register_alg(&des_alg);
- if (ret < 0)
- goto out;
-
- ret = crypto_register_alg(&des3_ede_alg);
- if (ret < 0)
- crypto_unregister_alg(&des_alg);
-out:
- return ret;
+ return crypto_register_algs(des_algs, ARRAY_SIZE(des_algs));
}
static void __exit des_generic_mod_fini(void)
{
- crypto_unregister_alg(&des3_ede_alg);
- crypto_unregister_alg(&des_alg);
+ crypto_unregister_algs(des_algs, ARRAY_SIZE(des_algs));
}
module_init(des_generic_mod_init);
diff --git a/crypto/digest.c b/crypto/digest.c
deleted file mode 100644
index 5d3f1303da9..00000000000
--- a/crypto/digest.c
+++ /dev/null
@@ -1,240 +0,0 @@
-/*
- * Cryptographic API.
- *
- * Digest operations.
- *
- * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
- */
-
-#include <crypto/internal/hash.h>
-#include <crypto/scatterwalk.h>
-#include <linux/mm.h>
-#include <linux/errno.h>
-#include <linux/hardirq.h>
-#include <linux/highmem.h>
-#include <linux/kernel.h>
-#include <linux/module.h>
-#include <linux/scatterlist.h>
-
-#include "internal.h"
-
-static int init(struct hash_desc *desc)
-{
- struct crypto_tfm *tfm = crypto_hash_tfm(desc->tfm);
-
- tfm->__crt_alg->cra_digest.dia_init(tfm);
- return 0;
-}
-
-static int update2(struct hash_desc *desc,
- struct scatterlist *sg, unsigned int nbytes)
-{
- struct crypto_tfm *tfm = crypto_hash_tfm(desc->tfm);
- unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
-
- if (!nbytes)
- return 0;
-
- for (;;) {
- struct page *pg = sg_page(sg);
- unsigned int offset = sg->offset;
- unsigned int l = sg->length;
-
- if (unlikely(l > nbytes))
- l = nbytes;
- nbytes -= l;
-
- do {
- unsigned int bytes_from_page = min(l, ((unsigned int)
- (PAGE_SIZE)) -
- offset);
- char *src = crypto_kmap(pg, 0);
- char *p = src + offset;
-
- if (unlikely(offset & alignmask)) {
- unsigned int bytes =
- alignmask + 1 - (offset & alignmask);
- bytes = min(bytes, bytes_from_page);
- tfm->__crt_alg->cra_digest.dia_update(tfm, p,
- bytes);
- p += bytes;
- bytes_from_page -= bytes;
- l -= bytes;
- }
- tfm->__crt_alg->cra_digest.dia_update(tfm, p,
- bytes_from_page);
- crypto_kunmap(src, 0);
- crypto_yield(desc->flags);
- offset = 0;
- pg++;
- l -= bytes_from_page;
- } while (l > 0);
-
- if (!nbytes)
- break;
- sg = scatterwalk_sg_next(sg);
- }
-
- return 0;
-}
-
-static int update(struct hash_desc *desc,
- struct scatterlist *sg, unsigned int nbytes)
-{
- if (WARN_ON_ONCE(in_irq()))
- return -EDEADLK;
- return update2(desc, sg, nbytes);
-}
-
-static int final(struct hash_desc *desc, u8 *out)
-{
- struct crypto_tfm *tfm = crypto_hash_tfm(desc->tfm);
- unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
- struct digest_alg *digest = &tfm->__crt_alg->cra_digest;
-
- if (unlikely((unsigned long)out & alignmask)) {
- unsigned long align = alignmask + 1;
- unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm);
- u8 *dst = (u8 *)ALIGN(addr, align) +
- ALIGN(tfm->__crt_alg->cra_ctxsize, align);
-
- digest->dia_final(tfm, dst);
- memcpy(out, dst, digest->dia_digestsize);
- } else
- digest->dia_final(tfm, out);
-
- return 0;
-}
-
-static int nosetkey(struct crypto_hash *tfm, const u8 *key, unsigned int keylen)
-{
- crypto_hash_clear_flags(tfm, CRYPTO_TFM_RES_MASK);
- return -ENOSYS;
-}
-
-static int setkey(struct crypto_hash *hash, const u8 *key, unsigned int keylen)
-{
- struct crypto_tfm *tfm = crypto_hash_tfm(hash);
-
- crypto_hash_clear_flags(hash, CRYPTO_TFM_RES_MASK);
- return tfm->__crt_alg->cra_digest.dia_setkey(tfm, key, keylen);
-}
-
-static int digest(struct hash_desc *desc,
- struct scatterlist *sg, unsigned int nbytes, u8 *out)
-{
- if (WARN_ON_ONCE(in_irq()))
- return -EDEADLK;
-
- init(desc);
- update2(desc, sg, nbytes);
- return final(desc, out);
-}
-
-int crypto_init_digest_ops(struct crypto_tfm *tfm)
-{
- struct hash_tfm *ops = &tfm->crt_hash;
- struct digest_alg *dalg = &tfm->__crt_alg->cra_digest;
-
- if (dalg->dia_digestsize > PAGE_SIZE / 8)
- return -EINVAL;
-
- ops->init = init;
- ops->update = update;
- ops->final = final;
- ops->digest = digest;
- ops->setkey = dalg->dia_setkey ? setkey : nosetkey;
- ops->digestsize = dalg->dia_digestsize;
-
- return 0;
-}
-
-void crypto_exit_digest_ops(struct crypto_tfm *tfm)
-{
-}
-
-static int digest_async_nosetkey(struct crypto_ahash *tfm_async, const u8 *key,
- unsigned int keylen)
-{
- crypto_ahash_clear_flags(tfm_async, CRYPTO_TFM_RES_MASK);
- return -ENOSYS;
-}
-
-static int digest_async_setkey(struct crypto_ahash *tfm_async, const u8 *key,
- unsigned int keylen)
-{
- struct crypto_tfm *tfm = crypto_ahash_tfm(tfm_async);
- struct digest_alg *dalg = &tfm->__crt_alg->cra_digest;
-
- crypto_ahash_clear_flags(tfm_async, CRYPTO_TFM_RES_MASK);
- return dalg->dia_setkey(tfm, key, keylen);
-}
-
-static int digest_async_init(struct ahash_request *req)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- struct digest_alg *dalg = &tfm->__crt_alg->cra_digest;
-
- dalg->dia_init(tfm);
- return 0;
-}
-
-static int digest_async_update(struct ahash_request *req)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- struct hash_desc desc = {
- .tfm = __crypto_hash_cast(tfm),
- .flags = req->base.flags,
- };
-
- update(&desc, req->src, req->nbytes);
- return 0;
-}
-
-static int digest_async_final(struct ahash_request *req)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- struct hash_desc desc = {
- .tfm = __crypto_hash_cast(tfm),
- .flags = req->base.flags,
- };
-
- final(&desc, req->result);
- return 0;
-}
-
-static int digest_async_digest(struct ahash_request *req)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- struct hash_desc desc = {
- .tfm = __crypto_hash_cast(tfm),
- .flags = req->base.flags,
- };
-
- return digest(&desc, req->src, req->nbytes, req->result);
-}
-
-int crypto_init_digest_ops_async(struct crypto_tfm *tfm)
-{
- struct ahash_tfm *crt = &tfm->crt_ahash;
- struct digest_alg *dalg = &tfm->__crt_alg->cra_digest;
-
- if (dalg->dia_digestsize > PAGE_SIZE / 8)
- return -EINVAL;
-
- crt->init = digest_async_init;
- crt->update = digest_async_update;
- crt->final = digest_async_final;
- crt->digest = digest_async_digest;
- crt->setkey = dalg->dia_setkey ? digest_async_setkey :
- digest_async_nosetkey;
- crt->digestsize = dalg->dia_digestsize;
-
- return 0;
-}
diff --git a/crypto/ecb.c b/crypto/ecb.c
index a46838e98a7..935cfef4aa8 100644
--- a/crypto/ecb.c
+++ b/crypto/ecb.c
@@ -55,7 +55,7 @@ static int crypto_ecb_crypt(struct blkcipher_desc *desc,
do {
fn(crypto_cipher_tfm(tfm), wdst, wsrc);
-
+
wsrc += bsize;
wdst += bsize;
} while ((nbytes -= bsize) >= bsize);
diff --git a/crypto/eseqiv.c b/crypto/eseqiv.c
index 2a342c8e52b..42ce9f570ae 100644
--- a/crypto/eseqiv.c
+++ b/crypto/eseqiv.c
@@ -62,20 +62,6 @@ out:
skcipher_givcrypt_complete(req, err);
}
-static void eseqiv_chain(struct scatterlist *head, struct scatterlist *sg,
- int chain)
-{
- if (chain) {
- head->length += sg->length;
- sg = scatterwalk_sg_next(sg);
- }
-
- if (sg)
- scatterwalk_sg_chain(head, 2, sg);
- else
- sg_mark_end(head);
-}
-
static int eseqiv_givencrypt(struct skcipher_givcrypt_request *req)
{
struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
@@ -124,13 +110,13 @@ static int eseqiv_givencrypt(struct skcipher_givcrypt_request *req)
sg_init_table(reqctx->src, 2);
sg_set_buf(reqctx->src, giv, ivsize);
- eseqiv_chain(reqctx->src, osrc, vsrc == giv + ivsize);
+ scatterwalk_crypto_chain(reqctx->src, osrc, vsrc == giv + ivsize, 2);
dst = reqctx->src;
if (osrc != odst) {
sg_init_table(reqctx->dst, 2);
sg_set_buf(reqctx->dst, giv, ivsize);
- eseqiv_chain(reqctx->dst, odst, vdst == giv + ivsize);
+ scatterwalk_crypto_chain(reqctx->dst, odst, vdst == giv + ivsize, 2);
dst = reqctx->dst;
}
@@ -153,7 +139,8 @@ static int eseqiv_givencrypt(struct skcipher_givcrypt_request *req)
if (err)
goto out;
- eseqiv_complete2(req);
+ if (giv != req->giv)
+ eseqiv_complete2(req);
out:
return err;
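eseqiv's private chaining helper goes away in favour of a shared scatterwalk_crypto_chain(), which takes the scatterlist table size as an extra argument. Based on the removed eseqiv_chain() above and the num=2 call sites, the shared helper can be expected to look roughly like the sketch below (the authoritative definition lives in the crypto scatterwalk header, not here):

static inline void scatterwalk_crypto_chain(struct scatterlist *head,
					    struct scatterlist *sg,
					    int chain, int num)
{
	if (chain) {
		/* merge the first entry into head, then walk on */
		head->length += sg->length;
		sg = scatterwalk_sg_next(sg);
	}

	if (sg)
		scatterwalk_sg_chain(head, num, sg);
	else
		sg_mark_end(head);
}
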
diff --git a/crypto/fcrypt.c b/crypto/fcrypt.c
index b82d61f4e26..021d7fec6bc 100644
--- a/crypto/fcrypt.c
+++ b/crypto/fcrypt.c
@@ -60,13 +60,13 @@ do { \
u32 t = lo & ((1 << n) - 1); \
lo = (lo >> n) | ((hi & ((1 << n) - 1)) << (32 - n)); \
hi = (hi >> n) | (t << (24-n)); \
-} while(0)
+} while (0)
/* Rotate right one 64 bit number as a 56 bit number */
#define ror56_64(k, n) \
do { \
k = (k >> n) | ((k & ((1 << n) - 1)) << (56 - n)); \
-} while(0)
+} while (0)
/*
* Sboxes for Feistel network derived from
@@ -110,7 +110,7 @@ static const __be32 sbox0[256] = {
};
#undef Z
-#define Z(x) cpu_to_be32((x << 27) | (x >> 5))
+#define Z(x) cpu_to_be32(((x & 0x1f) << 27) | (x >> 5))
static const __be32 sbox1[256] = {
Z(0x77), Z(0x14), Z(0xa6), Z(0xfe), Z(0xb2), Z(0x5e), Z(0x8c), Z(0x3e),
Z(0x67), Z(0x6c), Z(0xa1), Z(0x0d), Z(0xc2), Z(0xa2), Z(0xc1), Z(0x85),
@@ -228,7 +228,7 @@ do { \
union lc4 { __be32 l; u8 c[4]; } u; \
u.l = sched ^ R; \
L ^= sbox0[u.c[0]] ^ sbox1[u.c[1]] ^ sbox2[u.c[2]] ^ sbox3[u.c[3]]; \
-} while(0)
+} while (0)
/*
* encryptor
@@ -396,7 +396,6 @@ static struct crypto_alg fcrypt_alg = {
.cra_ctxsize = sizeof(struct fcrypt_ctx),
.cra_module = THIS_MODULE,
.cra_alignmask = 3,
- .cra_list = LIST_HEAD_INIT(fcrypt_alg.cra_list),
.cra_u = { .cipher = {
.cia_min_keysize = 8,
.cia_max_keysize = 8,
diff --git a/crypto/gcm.c b/crypto/gcm.c
index e70afd0c73d..b4f01793900 100644
--- a/crypto/gcm.c
+++ b/crypto/gcm.c
@@ -11,7 +11,10 @@
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
+#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
+#include <crypto/hash.h>
+#include "internal.h"
#include <linux/completion.h>
#include <linux/err.h>
#include <linux/init.h>
@@ -21,11 +24,12 @@
struct gcm_instance_ctx {
struct crypto_skcipher_spawn ctr;
+ struct crypto_ahash_spawn ghash;
};
struct crypto_gcm_ctx {
struct crypto_ablkcipher *ctr;
- struct gf128mul_4k *gf128;
+ struct crypto_ahash *ghash;
};
struct crypto_rfc4106_ctx {
@@ -33,11 +37,30 @@ struct crypto_rfc4106_ctx {
u8 nonce[4];
};
+struct crypto_rfc4543_instance_ctx {
+ struct crypto_aead_spawn aead;
+ struct crypto_skcipher_spawn null;
+};
+
+struct crypto_rfc4543_ctx {
+ struct crypto_aead *child;
+ struct crypto_blkcipher *null;
+ u8 nonce[4];
+};
+
+struct crypto_rfc4543_req_ctx {
+ u8 auth_tag[16];
+ u8 assocbuf[32];
+ struct scatterlist cipher[1];
+ struct scatterlist payload[2];
+ struct scatterlist assoc[2];
+ struct aead_request subreq;
+};
+
struct crypto_gcm_ghash_ctx {
- u32 bytes;
- u32 flags;
- struct gf128mul_4k *gf128;
- u8 buffer[16];
+ unsigned int cryptlen;
+ struct scatterlist *src;
+ void (*complete)(struct aead_request *req, int err);
};
struct crypto_gcm_req_priv_ctx {
@@ -45,8 +68,11 @@ struct crypto_gcm_req_priv_ctx {
u8 iauth_tag[16];
struct scatterlist src[2];
struct scatterlist dst[2];
- struct crypto_gcm_ghash_ctx ghash;
- struct ablkcipher_request abreq;
+ struct crypto_gcm_ghash_ctx ghash_ctx;
+ union {
+ struct ahash_request ahreq;
+ struct ablkcipher_request abreq;
+ } u;
};
struct crypto_gcm_setkey_result {
@@ -54,6 +80,8 @@ struct crypto_gcm_setkey_result {
struct completion completion;
};
+static void *gcm_zeroes;
+
static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
struct aead_request *req)
{
@@ -62,113 +90,6 @@ static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}
-static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
- struct gf128mul_4k *gf128)
-{
- ctx->bytes = 0;
- ctx->flags = flags;
- ctx->gf128 = gf128;
- memset(ctx->buffer, 0, 16);
-}
-
-static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
- const u8 *src, unsigned int srclen)
-{
- u8 *dst = ctx->buffer;
-
- if (ctx->bytes) {
- int n = min(srclen, ctx->bytes);
- u8 *pos = dst + (16 - ctx->bytes);
-
- ctx->bytes -= n;
- srclen -= n;
-
- while (n--)
- *pos++ ^= *src++;
-
- if (!ctx->bytes)
- gf128mul_4k_lle((be128 *)dst, ctx->gf128);
- }
-
- while (srclen >= 16) {
- crypto_xor(dst, src, 16);
- gf128mul_4k_lle((be128 *)dst, ctx->gf128);
- src += 16;
- srclen -= 16;
- }
-
- if (srclen) {
- ctx->bytes = 16 - srclen;
- while (srclen--)
- *dst++ ^= *src++;
- }
-}
-
-static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
- struct scatterlist *sg, int len)
-{
- struct scatter_walk walk;
- u8 *src;
- int n;
-
- if (!len)
- return;
-
- scatterwalk_start(&walk, sg);
-
- while (len) {
- n = scatterwalk_clamp(&walk, len);
-
- if (!n) {
- scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
- n = scatterwalk_clamp(&walk, len);
- }
-
- src = scatterwalk_map(&walk, 0);
-
- crypto_gcm_ghash_update(ctx, src, n);
- len -= n;
-
- scatterwalk_unmap(src, 0);
- scatterwalk_advance(&walk, n);
- scatterwalk_done(&walk, 0, len);
- if (len)
- crypto_yield(ctx->flags);
- }
-}
-
-static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
-{
- u8 *dst = ctx->buffer;
-
- if (ctx->bytes) {
- u8 *tmp = dst + (16 - ctx->bytes);
-
- while (ctx->bytes--)
- *tmp++ ^= 0;
-
- gf128mul_4k_lle((be128 *)dst, ctx->gf128);
- }
-
- ctx->bytes = 0;
-}
-
-static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
- unsigned int authlen,
- unsigned int cryptlen, u8 *dst)
-{
- u8 *buf = ctx->buffer;
- u128 lengths;
-
- lengths.a = cpu_to_be64(authlen * 8);
- lengths.b = cpu_to_be64(cryptlen * 8);
-
- crypto_gcm_ghash_flush(ctx);
- crypto_xor(buf, (u8 *)&lengths, 16);
- gf128mul_4k_lle((be128 *)buf, ctx->gf128);
- crypto_xor(dst, buf, 16);
-}
-
static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
{
struct crypto_gcm_setkey_result *result = req->data;
@@ -184,6 +105,7 @@ static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
unsigned int keylen)
{
struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
+ struct crypto_ahash *ghash = ctx->ghash;
struct crypto_ablkcipher *ctr = ctx->ctr;
struct {
be128 hash;
@@ -233,13 +155,12 @@ static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
if (err)
goto out;
- if (ctx->gf128 != NULL)
- gf128mul_free_4k(ctx->gf128);
-
- ctx->gf128 = gf128mul_init_4k_lle(&data->hash);
-
- if (ctx->gf128 == NULL)
- err = -ENOMEM;
+ crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK);
+ crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
+ CRYPTO_TFM_REQ_MASK);
+ err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));
+ crypto_aead_set_flags(aead, crypto_ahash_get_flags(ghash) &
+ CRYPTO_TFM_RES_MASK);
out:
kfree(data);
@@ -272,8 +193,6 @@ static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
struct crypto_aead *aead = crypto_aead_reqtfm(req);
struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
- u32 flags = req->base.tfm->crt_flags;
- struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
struct scatterlist *dst;
__be32 counter = cpu_to_be32(1);
@@ -296,35 +215,329 @@ static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
cryptlen + sizeof(pctx->auth_tag),
req->iv);
+}
- crypto_gcm_ghash_init(ghash, flags, ctx->gf128);
+static inline unsigned int gcm_remain(unsigned int len)
+{
+ len &= 0xfU;
+ return len ? 16 - len : 0;
+}
- crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
- crypto_gcm_ghash_flush(ghash);
+static void gcm_hash_len_done(struct crypto_async_request *areq, int err);
+static void gcm_hash_final_done(struct crypto_async_request *areq, int err);
+
+static int gcm_hash_update(struct aead_request *req,
+ struct crypto_gcm_req_priv_ctx *pctx,
+ crypto_completion_t complete,
+ struct scatterlist *src,
+ unsigned int len)
+{
+ struct ahash_request *ahreq = &pctx->u.ahreq;
+
+ ahash_request_set_callback(ahreq, aead_request_flags(req),
+ complete, req);
+ ahash_request_set_crypt(ahreq, src, NULL, len);
+
+ return crypto_ahash_update(ahreq);
}
-static int crypto_gcm_hash(struct aead_request *req)
+static int gcm_hash_remain(struct aead_request *req,
+ struct crypto_gcm_req_priv_ctx *pctx,
+ unsigned int remain,
+ crypto_completion_t complete)
+{
+ struct ahash_request *ahreq = &pctx->u.ahreq;
+
+ ahash_request_set_callback(ahreq, aead_request_flags(req),
+ complete, req);
+ sg_init_one(pctx->src, gcm_zeroes, remain);
+ ahash_request_set_crypt(ahreq, pctx->src, NULL, remain);
+
+ return crypto_ahash_update(ahreq);
+}
+
+static int gcm_hash_len(struct aead_request *req,
+ struct crypto_gcm_req_priv_ctx *pctx)
+{
+ struct ahash_request *ahreq = &pctx->u.ahreq;
+ struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
+ u128 lengths;
+
+ lengths.a = cpu_to_be64(req->assoclen * 8);
+ lengths.b = cpu_to_be64(gctx->cryptlen * 8);
+ memcpy(pctx->iauth_tag, &lengths, 16);
+ sg_init_one(pctx->src, pctx->iauth_tag, 16);
+ ahash_request_set_callback(ahreq, aead_request_flags(req),
+ gcm_hash_len_done, req);
+ ahash_request_set_crypt(ahreq, pctx->src,
+ NULL, sizeof(lengths));
+
+ return crypto_ahash_update(ahreq);
+}
+
+static int gcm_hash_final(struct aead_request *req,
+ struct crypto_gcm_req_priv_ctx *pctx)
+{
+ struct ahash_request *ahreq = &pctx->u.ahreq;
+
+ ahash_request_set_callback(ahreq, aead_request_flags(req),
+ gcm_hash_final_done, req);
+ ahash_request_set_crypt(ahreq, NULL, pctx->iauth_tag, 0);
+
+ return crypto_ahash_final(ahreq);
+}
+
+static void __gcm_hash_final_done(struct aead_request *req, int err)
{
- struct crypto_aead *aead = crypto_aead_reqtfm(req);
struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
- u8 *auth_tag = pctx->auth_tag;
- struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
+ struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
+
+ if (!err)
+ crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);
+
+ gctx->complete(req, err);
+}
+
+static void gcm_hash_final_done(struct crypto_async_request *areq, int err)
+{
+ struct aead_request *req = areq->data;
+
+ __gcm_hash_final_done(req, err);
+}
+
+static void __gcm_hash_len_done(struct aead_request *req, int err)
+{
+ struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+
+ if (!err) {
+ err = gcm_hash_final(req, pctx);
+ if (err == -EINPROGRESS || err == -EBUSY)
+ return;
+ }
+
+ __gcm_hash_final_done(req, err);
+}
+
+static void gcm_hash_len_done(struct crypto_async_request *areq, int err)
+{
+ struct aead_request *req = areq->data;
+
+ __gcm_hash_len_done(req, err);
+}
+
+static void __gcm_hash_crypt_remain_done(struct aead_request *req, int err)
+{
+ struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+
+ if (!err) {
+ err = gcm_hash_len(req, pctx);
+ if (err == -EINPROGRESS || err == -EBUSY)
+ return;
+ }
+
+ __gcm_hash_len_done(req, err);
+}
+
+static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq,
+ int err)
+{
+ struct aead_request *req = areq->data;
- crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
- crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
- auth_tag);
+ __gcm_hash_crypt_remain_done(req, err);
+}
+
+static void __gcm_hash_crypt_done(struct aead_request *req, int err)
+{
+ struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+ struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
+ unsigned int remain;
+
+ if (!err) {
+ remain = gcm_remain(gctx->cryptlen);
+ BUG_ON(!remain);
+ err = gcm_hash_remain(req, pctx, remain,
+ gcm_hash_crypt_remain_done);
+ if (err == -EINPROGRESS || err == -EBUSY)
+ return;
+ }
+
+ __gcm_hash_crypt_remain_done(req, err);
+}
+
+static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err)
+{
+ struct aead_request *req = areq->data;
+
+ __gcm_hash_crypt_done(req, err);
+}
+
+static void __gcm_hash_assoc_remain_done(struct aead_request *req, int err)
+{
+ struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+ struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
+ crypto_completion_t complete;
+ unsigned int remain = 0;
+
+ if (!err && gctx->cryptlen) {
+ remain = gcm_remain(gctx->cryptlen);
+ complete = remain ? gcm_hash_crypt_done :
+ gcm_hash_crypt_remain_done;
+ err = gcm_hash_update(req, pctx, complete,
+ gctx->src, gctx->cryptlen);
+ if (err == -EINPROGRESS || err == -EBUSY)
+ return;
+ }
+
+ if (remain)
+ __gcm_hash_crypt_done(req, err);
+ else
+ __gcm_hash_crypt_remain_done(req, err);
+}
+
+static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq,
+ int err)
+{
+ struct aead_request *req = areq->data;
+
+ __gcm_hash_assoc_remain_done(req, err);
+}
+
+static void __gcm_hash_assoc_done(struct aead_request *req, int err)
+{
+ struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+ unsigned int remain;
+
+ if (!err) {
+ remain = gcm_remain(req->assoclen);
+ BUG_ON(!remain);
+ err = gcm_hash_remain(req, pctx, remain,
+ gcm_hash_assoc_remain_done);
+ if (err == -EINPROGRESS || err == -EBUSY)
+ return;
+ }
+
+ __gcm_hash_assoc_remain_done(req, err);
+}
+
+static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err)
+{
+ struct aead_request *req = areq->data;
+
+ __gcm_hash_assoc_done(req, err);
+}
+
+static void __gcm_hash_init_done(struct aead_request *req, int err)
+{
+ struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+ crypto_completion_t complete;
+ unsigned int remain = 0;
+
+ if (!err && req->assoclen) {
+ remain = gcm_remain(req->assoclen);
+ complete = remain ? gcm_hash_assoc_done :
+ gcm_hash_assoc_remain_done;
+ err = gcm_hash_update(req, pctx, complete,
+ req->assoc, req->assoclen);
+ if (err == -EINPROGRESS || err == -EBUSY)
+ return;
+ }
+
+ if (remain)
+ __gcm_hash_assoc_done(req, err);
+ else
+ __gcm_hash_assoc_remain_done(req, err);
+}
+
+static void gcm_hash_init_done(struct crypto_async_request *areq, int err)
+{
+ struct aead_request *req = areq->data;
+
+ __gcm_hash_init_done(req, err);
+}
+
+static int gcm_hash(struct aead_request *req,
+ struct crypto_gcm_req_priv_ctx *pctx)
+{
+ struct ahash_request *ahreq = &pctx->u.ahreq;
+ struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
+ struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
+ unsigned int remain;
+ crypto_completion_t complete;
+ int err;
+
+ ahash_request_set_tfm(ahreq, ctx->ghash);
+
+ ahash_request_set_callback(ahreq, aead_request_flags(req),
+ gcm_hash_init_done, req);
+ err = crypto_ahash_init(ahreq);
+ if (err)
+ return err;
+ remain = gcm_remain(req->assoclen);
+ complete = remain ? gcm_hash_assoc_done : gcm_hash_assoc_remain_done;
+ err = gcm_hash_update(req, pctx, complete, req->assoc, req->assoclen);
+ if (err)
+ return err;
+ if (remain) {
+ err = gcm_hash_remain(req, pctx, remain,
+ gcm_hash_assoc_remain_done);
+ if (err)
+ return err;
+ }
+ remain = gcm_remain(gctx->cryptlen);
+ complete = remain ? gcm_hash_crypt_done : gcm_hash_crypt_remain_done;
+ err = gcm_hash_update(req, pctx, complete, gctx->src, gctx->cryptlen);
+ if (err)
+ return err;
+ if (remain) {
+ err = gcm_hash_remain(req, pctx, remain,
+ gcm_hash_crypt_remain_done);
+ if (err)
+ return err;
+ }
+ err = gcm_hash_len(req, pctx);
+ if (err)
+ return err;
+ err = gcm_hash_final(req, pctx);
+ if (err)
+ return err;
+
+ return 0;
+}
+
+static void gcm_enc_copy_hash(struct aead_request *req,
+ struct crypto_gcm_req_priv_ctx *pctx)
+{
+ struct crypto_aead *aead = crypto_aead_reqtfm(req);
+ u8 *auth_tag = pctx->auth_tag;
scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
crypto_aead_authsize(aead), 1);
- return 0;
}
-static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
+static void gcm_enc_hash_done(struct aead_request *req, int err)
{
- struct aead_request *req = areq->data;
+ struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
if (!err)
- err = crypto_gcm_hash(req);
+ gcm_enc_copy_hash(req, pctx);
+
+ aead_request_complete(req, err);
+}
+
+static void gcm_encrypt_done(struct crypto_async_request *areq, int err)
+{
+ struct aead_request *req = areq->data;
+ struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+
+ if (!err) {
+ err = gcm_hash(req, pctx);
+ if (err == -EINPROGRESS || err == -EBUSY)
+ return;
+ else if (!err) {
+ crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);
+ gcm_enc_copy_hash(req, pctx);
+ }
+ }
aead_request_complete(req, err);
}
@@ -332,43 +545,73 @@ static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
static int crypto_gcm_encrypt(struct aead_request *req)
{
struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
- struct ablkcipher_request *abreq = &pctx->abreq;
+ struct ablkcipher_request *abreq = &pctx->u.abreq;
+ struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
int err;
crypto_gcm_init_crypt(abreq, req, req->cryptlen);
ablkcipher_request_set_callback(abreq, aead_request_flags(req),
- crypto_gcm_encrypt_done, req);
+ gcm_encrypt_done, req);
+
+ gctx->src = req->dst;
+ gctx->cryptlen = req->cryptlen;
+ gctx->complete = gcm_enc_hash_done;
err = crypto_ablkcipher_encrypt(abreq);
if (err)
return err;
- return crypto_gcm_hash(req);
+ err = gcm_hash(req, pctx);
+ if (err)
+ return err;
+
+ crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);
+ gcm_enc_copy_hash(req, pctx);
+
+ return 0;
}
-static int crypto_gcm_verify(struct aead_request *req)
+static int crypto_gcm_verify(struct aead_request *req,
+ struct crypto_gcm_req_priv_ctx *pctx)
{
struct crypto_aead *aead = crypto_aead_reqtfm(req);
- struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
- struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
u8 *auth_tag = pctx->auth_tag;
u8 *iauth_tag = pctx->iauth_tag;
unsigned int authsize = crypto_aead_authsize(aead);
unsigned int cryptlen = req->cryptlen - authsize;
- crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
-
- authsize = crypto_aead_authsize(aead);
+ crypto_xor(auth_tag, iauth_tag, 16);
scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
- return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
+ return crypto_memneq(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
}
-static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
+static void gcm_decrypt_done(struct crypto_async_request *areq, int err)
{
struct aead_request *req = areq->data;
+ struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
if (!err)
- err = crypto_gcm_verify(req);
+ err = crypto_gcm_verify(req, pctx);
+
+ aead_request_complete(req, err);
+}
+
+static void gcm_dec_hash_done(struct aead_request *req, int err)
+{
+ struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
+ struct ablkcipher_request *abreq = &pctx->u.abreq;
+ struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
+
+ if (!err) {
+ ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+ gcm_decrypt_done, req);
+ crypto_gcm_init_crypt(abreq, req, gctx->cryptlen);
+ err = crypto_ablkcipher_decrypt(abreq);
+ if (err == -EINPROGRESS || err == -EBUSY)
+ return;
+ else if (!err)
+ err = crypto_gcm_verify(req, pctx);
+ }
aead_request_complete(req, err);
}
@@ -377,27 +620,32 @@ static int crypto_gcm_decrypt(struct aead_request *req)
{
struct crypto_aead *aead = crypto_aead_reqtfm(req);
struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
- struct ablkcipher_request *abreq = &pctx->abreq;
- struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
- unsigned int cryptlen = req->cryptlen;
+ struct ablkcipher_request *abreq = &pctx->u.abreq;
+ struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
unsigned int authsize = crypto_aead_authsize(aead);
+ unsigned int cryptlen = req->cryptlen;
int err;
if (cryptlen < authsize)
return -EINVAL;
cryptlen -= authsize;
- crypto_gcm_init_crypt(abreq, req, cryptlen);
- ablkcipher_request_set_callback(abreq, aead_request_flags(req),
- crypto_gcm_decrypt_done, req);
+ gctx->src = req->src;
+ gctx->cryptlen = cryptlen;
+ gctx->complete = gcm_dec_hash_done;
- crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);
+ err = gcm_hash(req, pctx);
+ if (err)
+ return err;
+ ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+ gcm_decrypt_done, req);
+ crypto_gcm_init_crypt(abreq, req, cryptlen);
err = crypto_ablkcipher_decrypt(abreq);
if (err)
return err;
- return crypto_gcm_verify(req);
+ return crypto_gcm_verify(req, pctx);
}
static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
@@ -406,65 +654,90 @@ static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
struct crypto_ablkcipher *ctr;
+ struct crypto_ahash *ghash;
unsigned long align;
int err;
+ ghash = crypto_spawn_ahash(&ictx->ghash);
+ if (IS_ERR(ghash))
+ return PTR_ERR(ghash);
+
ctr = crypto_spawn_skcipher(&ictx->ctr);
err = PTR_ERR(ctr);
if (IS_ERR(ctr))
- return err;
+ goto err_free_hash;
ctx->ctr = ctr;
- ctx->gf128 = NULL;
+ ctx->ghash = ghash;
align = crypto_tfm_alg_alignmask(tfm);
align &= ~(crypto_tfm_ctx_alignment() - 1);
tfm->crt_aead.reqsize = align +
- sizeof(struct crypto_gcm_req_priv_ctx) +
- crypto_ablkcipher_reqsize(ctr);
+ offsetof(struct crypto_gcm_req_priv_ctx, u) +
+ max(sizeof(struct ablkcipher_request) +
+ crypto_ablkcipher_reqsize(ctr),
+ sizeof(struct ahash_request) +
+ crypto_ahash_reqsize(ghash));
return 0;
+
+err_free_hash:
+ crypto_free_ahash(ghash);
+ return err;
}
static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
{
struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
- if (ctx->gf128 != NULL)
- gf128mul_free_4k(ctx->gf128);
-
+ crypto_free_ahash(ctx->ghash);
crypto_free_ablkcipher(ctx->ctr);
}
static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
const char *full_name,
- const char *ctr_name)
+ const char *ctr_name,
+ const char *ghash_name)
{
struct crypto_attr_type *algt;
struct crypto_instance *inst;
struct crypto_alg *ctr;
+ struct crypto_alg *ghash_alg;
+ struct ahash_alg *ghash_ahash_alg;
struct gcm_instance_ctx *ctx;
int err;
algt = crypto_get_attr_type(tb);
- err = PTR_ERR(algt);
if (IS_ERR(algt))
- return ERR_PTR(err);
+ return ERR_CAST(algt);
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return ERR_PTR(-EINVAL);
+ ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
+ CRYPTO_ALG_TYPE_HASH,
+ CRYPTO_ALG_TYPE_AHASH_MASK);
+ if (IS_ERR(ghash_alg))
+ return ERR_CAST(ghash_alg);
+
+ err = -ENOMEM;
inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
if (!inst)
- return ERR_PTR(-ENOMEM);
+ goto out_put_ghash;
ctx = crypto_instance_ctx(inst);
+ ghash_ahash_alg = container_of(ghash_alg, struct ahash_alg, halg.base);
+ err = crypto_init_ahash_spawn(&ctx->ghash, &ghash_ahash_alg->halg,
+ inst);
+ if (err)
+ goto err_free_inst;
+
crypto_set_skcipher_spawn(&ctx->ctr, inst);
err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
crypto_requires_sync(algt->type,
algt->mask));
if (err)
- goto err_free_inst;
+ goto err_drop_ghash;
ctr = crypto_skcipher_spawn_alg(&ctx->ctr);
@@ -479,7 +752,8 @@ static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
err = -ENAMETOOLONG;
if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
- "gcm_base(%s)", ctr->cra_driver_name) >=
+ "gcm_base(%s,%s)", ctr->cra_driver_name,
+ ghash_alg->cra_driver_name) >=
CRYPTO_MAX_ALG_NAME)
goto out_put_ctr;
@@ -502,27 +776,29 @@ static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;
out:
+ crypto_mod_put(ghash_alg);
return inst;
out_put_ctr:
crypto_drop_skcipher(&ctx->ctr);
+err_drop_ghash:
+ crypto_drop_ahash(&ctx->ghash);
err_free_inst:
kfree(inst);
+out_put_ghash:
inst = ERR_PTR(err);
goto out;
}
static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
{
- int err;
const char *cipher_name;
char ctr_name[CRYPTO_MAX_ALG_NAME];
char full_name[CRYPTO_MAX_ALG_NAME];
cipher_name = crypto_attr_alg_name(tb[1]);
- err = PTR_ERR(cipher_name);
if (IS_ERR(cipher_name))
- return ERR_PTR(err);
+ return ERR_CAST(cipher_name);
if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
CRYPTO_MAX_ALG_NAME)
@@ -532,7 +808,7 @@ static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
CRYPTO_MAX_ALG_NAME)
return ERR_PTR(-ENAMETOOLONG);
- return crypto_gcm_alloc_common(tb, full_name, ctr_name);
+ return crypto_gcm_alloc_common(tb, full_name, ctr_name, "ghash");
}
static void crypto_gcm_free(struct crypto_instance *inst)
@@ -540,6 +816,7 @@ static void crypto_gcm_free(struct crypto_instance *inst)
struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);
crypto_drop_skcipher(&ctx->ctr);
+ crypto_drop_ahash(&ctx->ghash);
kfree(inst);
}
@@ -552,20 +829,23 @@ static struct crypto_template crypto_gcm_tmpl = {
static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
{
- int err;
const char *ctr_name;
+ const char *ghash_name;
char full_name[CRYPTO_MAX_ALG_NAME];
ctr_name = crypto_attr_alg_name(tb[1]);
- err = PTR_ERR(ctr_name);
if (IS_ERR(ctr_name))
- return ERR_PTR(err);
+ return ERR_CAST(ctr_name);
- if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s)",
- ctr_name) >= CRYPTO_MAX_ALG_NAME)
+ ghash_name = crypto_attr_alg_name(tb[2]);
+ if (IS_ERR(ghash_name))
+ return ERR_CAST(ghash_name);
+
+ if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s,%s)",
+ ctr_name, ghash_name) >= CRYPTO_MAX_ALG_NAME)
return ERR_PTR(-ENAMETOOLONG);
- return crypto_gcm_alloc_common(tb, full_name, ctr_name);
+ return crypto_gcm_alloc_common(tb, full_name, ctr_name, ghash_name);
}
static struct crypto_template crypto_gcm_base_tmpl = {
@@ -691,17 +971,15 @@ static struct crypto_instance *crypto_rfc4106_alloc(struct rtattr **tb)
int err;
algt = crypto_get_attr_type(tb);
- err = PTR_ERR(algt);
if (IS_ERR(algt))
- return ERR_PTR(err);
+ return ERR_CAST(algt);
if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
return ERR_PTR(-EINVAL);
ccm_name = crypto_attr_alg_name(tb[1]);
- err = PTR_ERR(ccm_name);
if (IS_ERR(ccm_name))
- return ERR_PTR(err);
+ return ERR_CAST(ccm_name);
inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
if (!inst)
@@ -780,10 +1058,345 @@ static struct crypto_template crypto_rfc4106_tmpl = {
.module = THIS_MODULE,
};
+static inline struct crypto_rfc4543_req_ctx *crypto_rfc4543_reqctx(
+ struct aead_request *req)
+{
+ unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
+
+ return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
+}
+
+static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key,
+ unsigned int keylen)
+{
+ struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
+ struct crypto_aead *child = ctx->child;
+ int err;
+
+ if (keylen < 4)
+ return -EINVAL;
+
+ keylen -= 4;
+ memcpy(ctx->nonce, key + keylen, 4);
+
+ crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+ crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
+ CRYPTO_TFM_REQ_MASK);
+ err = crypto_aead_setkey(child, key, keylen);
+ crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
+ CRYPTO_TFM_RES_MASK);
+
+ return err;
+}
+
+static int crypto_rfc4543_setauthsize(struct crypto_aead *parent,
+ unsigned int authsize)
+{
+ struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
+
+ if (authsize != 16)
+ return -EINVAL;
+
+ return crypto_aead_setauthsize(ctx->child, authsize);
+}
+
+static void crypto_rfc4543_done(struct crypto_async_request *areq, int err)
+{
+ struct aead_request *req = areq->data;
+ struct crypto_aead *aead = crypto_aead_reqtfm(req);
+ struct crypto_rfc4543_req_ctx *rctx = crypto_rfc4543_reqctx(req);
+
+ if (!err) {
+ scatterwalk_map_and_copy(rctx->auth_tag, req->dst,
+ req->cryptlen,
+ crypto_aead_authsize(aead), 1);
+ }
+
+ aead_request_complete(req, err);
+}
+
+static struct aead_request *crypto_rfc4543_crypt(struct aead_request *req,
+ bool enc)
+{
+ struct crypto_aead *aead = crypto_aead_reqtfm(req);
+ struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
+ struct crypto_rfc4543_req_ctx *rctx = crypto_rfc4543_reqctx(req);
+ struct aead_request *subreq = &rctx->subreq;
+ struct scatterlist *src = req->src;
+ struct scatterlist *cipher = rctx->cipher;
+ struct scatterlist *payload = rctx->payload;
+ struct scatterlist *assoc = rctx->assoc;
+ unsigned int authsize = crypto_aead_authsize(aead);
+ unsigned int assoclen = req->assoclen;
+ struct page *srcp;
+ u8 *vsrc;
+ u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child),
+ crypto_aead_alignmask(ctx->child) + 1);
+
+ memcpy(iv, ctx->nonce, 4);
+ memcpy(iv + 4, req->iv, 8);
+
+ /* construct cipher/plaintext */
+ if (enc)
+ memset(rctx->auth_tag, 0, authsize);
+ else
+ scatterwalk_map_and_copy(rctx->auth_tag, src,
+ req->cryptlen - authsize,
+ authsize, 0);
+
+ sg_init_one(cipher, rctx->auth_tag, authsize);
+
+ /* construct the aad */
+ srcp = sg_page(src);
+ vsrc = PageHighMem(srcp) ? NULL : page_address(srcp) + src->offset;
+
+ sg_init_table(payload, 2);
+ sg_set_buf(payload, req->iv, 8);
+ scatterwalk_crypto_chain(payload, src, vsrc == req->iv + 8, 2);
+ assoclen += 8 + req->cryptlen - (enc ? 0 : authsize);
+
+ if (req->assoc->length == req->assoclen) {
+ sg_init_table(assoc, 2);
+ sg_set_page(assoc, sg_page(req->assoc), req->assoc->length,
+ req->assoc->offset);
+ } else {
+ BUG_ON(req->assoclen > sizeof(rctx->assocbuf));
+
+ scatterwalk_map_and_copy(rctx->assocbuf, req->assoc, 0,
+ req->assoclen, 0);
+
+ sg_init_table(assoc, 2);
+ sg_set_buf(assoc, rctx->assocbuf, req->assoclen);
+ }
+ scatterwalk_crypto_chain(assoc, payload, 0, 2);
+
+ aead_request_set_tfm(subreq, ctx->child);
+ aead_request_set_callback(subreq, req->base.flags, crypto_rfc4543_done,
+ req);
+ aead_request_set_crypt(subreq, cipher, cipher, enc ? 0 : authsize, iv);
+ aead_request_set_assoc(subreq, assoc, assoclen);
+
+ return subreq;
+}
+
+static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc)
+{
+ struct crypto_aead *aead = crypto_aead_reqtfm(req);
+ struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
+ unsigned int authsize = crypto_aead_authsize(aead);
+ unsigned int nbytes = req->cryptlen - (enc ? 0 : authsize);
+ struct blkcipher_desc desc = {
+ .tfm = ctx->null,
+ };
+
+ return crypto_blkcipher_encrypt(&desc, req->dst, req->src, nbytes);
+}
+
+static int crypto_rfc4543_encrypt(struct aead_request *req)
+{
+ struct crypto_aead *aead = crypto_aead_reqtfm(req);
+ struct crypto_rfc4543_req_ctx *rctx = crypto_rfc4543_reqctx(req);
+ struct aead_request *subreq;
+ int err;
+
+ if (req->src != req->dst) {
+ err = crypto_rfc4543_copy_src_to_dst(req, true);
+ if (err)
+ return err;
+ }
+
+ subreq = crypto_rfc4543_crypt(req, true);
+ err = crypto_aead_encrypt(subreq);
+ if (err)
+ return err;
+
+ scatterwalk_map_and_copy(rctx->auth_tag, req->dst, req->cryptlen,
+ crypto_aead_authsize(aead), 1);
+
+ return 0;
+}
+
+static int crypto_rfc4543_decrypt(struct aead_request *req)
+{
+ int err;
+
+ if (req->src != req->dst) {
+ err = crypto_rfc4543_copy_src_to_dst(req, false);
+ if (err)
+ return err;
+ }
+
+ req = crypto_rfc4543_crypt(req, false);
+
+ return crypto_aead_decrypt(req);
+}
+
+static int crypto_rfc4543_init_tfm(struct crypto_tfm *tfm)
+{
+ struct crypto_instance *inst = (void *)tfm->__crt_alg;
+ struct crypto_rfc4543_instance_ctx *ictx = crypto_instance_ctx(inst);
+ struct crypto_aead_spawn *spawn = &ictx->aead;
+ struct crypto_rfc4543_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct crypto_aead *aead;
+ struct crypto_blkcipher *null;
+ unsigned long align;
+ int err = 0;
+
+ aead = crypto_spawn_aead(spawn);
+ if (IS_ERR(aead))
+ return PTR_ERR(aead);
+
+ null = crypto_spawn_blkcipher(&ictx->null.base);
+ err = PTR_ERR(null);
+ if (IS_ERR(null))
+ goto err_free_aead;
+
+ ctx->child = aead;
+ ctx->null = null;
+
+ align = crypto_aead_alignmask(aead);
+ align &= ~(crypto_tfm_ctx_alignment() - 1);
+ tfm->crt_aead.reqsize = sizeof(struct crypto_rfc4543_req_ctx) +
+ ALIGN(crypto_aead_reqsize(aead),
+ crypto_tfm_ctx_alignment()) +
+ align + 16;
+
+ return 0;
+
+err_free_aead:
+ crypto_free_aead(aead);
+ return err;
+}
+
+static void crypto_rfc4543_exit_tfm(struct crypto_tfm *tfm)
+{
+ struct crypto_rfc4543_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ crypto_free_aead(ctx->child);
+ crypto_free_blkcipher(ctx->null);
+}
+
+static struct crypto_instance *crypto_rfc4543_alloc(struct rtattr **tb)
+{
+ struct crypto_attr_type *algt;
+ struct crypto_instance *inst;
+ struct crypto_aead_spawn *spawn;
+ struct crypto_alg *alg;
+ struct crypto_rfc4543_instance_ctx *ctx;
+ const char *ccm_name;
+ int err;
+
+ algt = crypto_get_attr_type(tb);
+ if (IS_ERR(algt))
+ return ERR_CAST(algt);
+
+ if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
+ return ERR_PTR(-EINVAL);
+
+ ccm_name = crypto_attr_alg_name(tb[1]);
+ if (IS_ERR(ccm_name))
+ return ERR_CAST(ccm_name);
+
+ inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
+ if (!inst)
+ return ERR_PTR(-ENOMEM);
+
+ ctx = crypto_instance_ctx(inst);
+ spawn = &ctx->aead;
+ crypto_set_aead_spawn(spawn, inst);
+ err = crypto_grab_aead(spawn, ccm_name, 0,
+ crypto_requires_sync(algt->type, algt->mask));
+ if (err)
+ goto out_free_inst;
+
+ alg = crypto_aead_spawn_alg(spawn);
+
+ crypto_set_skcipher_spawn(&ctx->null, inst);
+ err = crypto_grab_skcipher(&ctx->null, "ecb(cipher_null)", 0,
+ CRYPTO_ALG_ASYNC);
+ if (err)
+ goto out_drop_alg;
+
+ crypto_skcipher_spawn_alg(&ctx->null);
+
+ err = -EINVAL;
+
+ /* We only support 16-byte blocks. */
+ if (alg->cra_aead.ivsize != 16)
+ goto out_drop_ecbnull;
+
+ /* Not a stream cipher? */
+ if (alg->cra_blocksize != 1)
+ goto out_drop_ecbnull;
+
+ err = -ENAMETOOLONG;
+ if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
+ "rfc4543(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME ||
+ snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+ "rfc4543(%s)", alg->cra_driver_name) >=
+ CRYPTO_MAX_ALG_NAME)
+ goto out_drop_ecbnull;
+
+ inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
+ inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
+ inst->alg.cra_priority = alg->cra_priority;
+ inst->alg.cra_blocksize = 1;
+ inst->alg.cra_alignmask = alg->cra_alignmask;
+ inst->alg.cra_type = &crypto_nivaead_type;
+
+ inst->alg.cra_aead.ivsize = 8;
+ inst->alg.cra_aead.maxauthsize = 16;
+
+ inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4543_ctx);
+
+ inst->alg.cra_init = crypto_rfc4543_init_tfm;
+ inst->alg.cra_exit = crypto_rfc4543_exit_tfm;
+
+ inst->alg.cra_aead.setkey = crypto_rfc4543_setkey;
+ inst->alg.cra_aead.setauthsize = crypto_rfc4543_setauthsize;
+ inst->alg.cra_aead.encrypt = crypto_rfc4543_encrypt;
+ inst->alg.cra_aead.decrypt = crypto_rfc4543_decrypt;
+
+ inst->alg.cra_aead.geniv = "seqiv";
+
+out:
+ return inst;
+
+out_drop_ecbnull:
+ crypto_drop_skcipher(&ctx->null);
+out_drop_alg:
+ crypto_drop_aead(spawn);
+out_free_inst:
+ kfree(inst);
+ inst = ERR_PTR(err);
+ goto out;
+}
+
+static void crypto_rfc4543_free(struct crypto_instance *inst)
+{
+ struct crypto_rfc4543_instance_ctx *ctx = crypto_instance_ctx(inst);
+
+ crypto_drop_aead(&ctx->aead);
+ crypto_drop_skcipher(&ctx->null);
+
+ kfree(inst);
+}
+
+static struct crypto_template crypto_rfc4543_tmpl = {
+ .name = "rfc4543",
+ .alloc = crypto_rfc4543_alloc,
+ .free = crypto_rfc4543_free,
+ .module = THIS_MODULE,
+};
+
static int __init crypto_gcm_module_init(void)
{
int err;
+ gcm_zeroes = kzalloc(16, GFP_KERNEL);
+ if (!gcm_zeroes)
+ return -ENOMEM;
+
err = crypto_register_template(&crypto_gcm_base_tmpl);
if (err)
goto out;
@@ -796,18 +1409,27 @@ static int __init crypto_gcm_module_init(void)
if (err)
goto out_undo_gcm;
-out:
- return err;
+ err = crypto_register_template(&crypto_rfc4543_tmpl);
+ if (err)
+ goto out_undo_rfc4106;
+ return 0;
+
+out_undo_rfc4106:
+ crypto_unregister_template(&crypto_rfc4106_tmpl);
out_undo_gcm:
crypto_unregister_template(&crypto_gcm_tmpl);
out_undo_base:
crypto_unregister_template(&crypto_gcm_base_tmpl);
- goto out;
+out:
+ kfree(gcm_zeroes);
+ return err;
}
static void __exit crypto_gcm_module_exit(void)
{
+ kfree(gcm_zeroes);
+ crypto_unregister_template(&crypto_rfc4543_tmpl);
crypto_unregister_template(&crypto_rfc4106_tmpl);
crypto_unregister_template(&crypto_gcm_tmpl);
crypto_unregister_template(&crypto_gcm_base_tmpl);
@@ -821,3 +1443,4 @@ MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
MODULE_ALIAS("gcm_base");
MODULE_ALIAS("rfc4106");
+MODULE_ALIAS("rfc4543");
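Among the many changes in gcm.c, tag verification now uses crypto_memneq() rather than memcmp(), so a mismatch is not reported early and the comparison time does not leak how many leading tag bytes were correct. A minimal, illustrative constant-time check in the same spirit (the in-tree crypto/memneq.c implementation is more careful about compiler optimisations):

#include <stdio.h>
#include <stddef.h>

static unsigned long tag_neq(const void *a, const void *b, size_t size)
{
	const unsigned char *pa = a, *pb = b;
	unsigned long neq = 0;

	while (size--)
		neq |= *pa++ ^ *pb++;	/* accumulate differences, no early exit */

	return neq;			/* zero means the tags are identical */
}

int main(void)
{
	unsigned char want[16] = { 0 }, got[16] = { 0 };

	got[15] = 1;
	printf("tags %s\n", tag_neq(want, got, sizeof(want)) ? "differ" : "match");
	return 0;
}
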
diff --git a/crypto/gf128mul.c b/crypto/gf128mul.c
index ecbeaa1f17e..5276607c72d 100644
--- a/crypto/gf128mul.c
+++ b/crypto/gf128mul.c
@@ -4,7 +4,7 @@
* Copyright (c) 2006, Rik Snel <rsnel@cube.dyndns.org>
*
* Based on Dr Brian Gladman's (GPL'd) work published at
- * http://fp.gladman.plus.com/cryptography_technology/index.htm
+ * http://gladman.plushost.co.uk/oldsite/cryptography_technology/index.php
* See the original copyright notice below.
*
* This program is free software; you can redistribute it and/or modify it
@@ -89,7 +89,7 @@
}
/* Given the value i in 0..255 as the byte overflow when a field element
- in GHASH is multipled by x^8, this function will return the values that
+ in GHASH is multiplied by x^8, this function will return the values that
are generated in the lo 16-bit word of the field value by applying the
modular polynomial. The values lo_byte and hi_byte are returned via the
macro xp_fun(lo_byte, hi_byte) so that the values can be assembled into
@@ -182,7 +182,7 @@ void gf128mul_lle(be128 *r, const be128 *b)
for (i = 0; i < 7; ++i)
gf128mul_x_lle(&p[i + 1], &p[i]);
- memset(r, 0, sizeof(r));
+ memset(r, 0, sizeof(*r));
for (i = 0;;) {
u8 ch = ((u8 *)b)[15 - i];
@@ -220,7 +220,7 @@ void gf128mul_bbe(be128 *r, const be128 *b)
for (i = 0; i < 7; ++i)
gf128mul_x_bbe(&p[i + 1], &p[i]);
- memset(r, 0, sizeof(r));
+ memset(r, 0, sizeof(*r));
for (i = 0;;) {
u8 ch = ((u8 *)b)[i];
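
The two memset changes above fix a classic sizeof slip: r is a be128 *, so sizeof(r) is only the pointer size (4 or 8 bytes) and most of the 16-byte accumulator was left uninitialised. A generic illustration of the pattern, not taken from this file:

#include <crypto/b128ops.h>
#include <linux/string.h>

static void zero_element(be128 *r)
{
        /* Buggy form: sizeof(r) is the size of the pointer, so only the
         * first 4 or 8 bytes of *r would be cleared. */
        /* memset(r, 0, sizeof(r)); */

        /* Fixed form: sizeof(*r) covers the full 16-byte field element. */
        memset(r, 0, sizeof(*r));
}
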
diff --git a/crypto/ghash-generic.c b/crypto/ghash-generic.c
new file mode 100644
index 00000000000..9d3f0c69a86
--- /dev/null
+++ b/crypto/ghash-generic.c
@@ -0,0 +1,175 @@
+/*
+ * GHASH: digest algorithm for GCM (Galois/Counter Mode).
+ *
+ * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
+ * Copyright (c) 2009 Intel Corp.
+ * Author: Huang Ying <ying.huang@intel.com>
+ *
+ * The algorithm implementation is copied from gcm.c.
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 as published
+ * by the Free Software Foundation.
+ */
+
+#include <crypto/algapi.h>
+#include <crypto/gf128mul.h>
+#include <crypto/internal/hash.h>
+#include <linux/crypto.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+
+#define GHASH_BLOCK_SIZE 16
+#define GHASH_DIGEST_SIZE 16
+
+struct ghash_ctx {
+ struct gf128mul_4k *gf128;
+};
+
+struct ghash_desc_ctx {
+ u8 buffer[GHASH_BLOCK_SIZE];
+ u32 bytes;
+};
+
+static int ghash_init(struct shash_desc *desc)
+{
+ struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
+
+ memset(dctx, 0, sizeof(*dctx));
+
+ return 0;
+}
+
+static int ghash_setkey(struct crypto_shash *tfm,
+ const u8 *key, unsigned int keylen)
+{
+ struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
+
+ if (keylen != GHASH_BLOCK_SIZE) {
+ crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ return -EINVAL;
+ }
+
+ if (ctx->gf128)
+ gf128mul_free_4k(ctx->gf128);
+ ctx->gf128 = gf128mul_init_4k_lle((be128 *)key);
+ if (!ctx->gf128)
+ return -ENOMEM;
+
+ return 0;
+}
+
+static int ghash_update(struct shash_desc *desc,
+ const u8 *src, unsigned int srclen)
+{
+ struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
+ struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
+ u8 *dst = dctx->buffer;
+
+ if (!ctx->gf128)
+ return -ENOKEY;
+
+ if (dctx->bytes) {
+ int n = min(srclen, dctx->bytes);
+ u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
+
+ dctx->bytes -= n;
+ srclen -= n;
+
+ while (n--)
+ *pos++ ^= *src++;
+
+ if (!dctx->bytes)
+ gf128mul_4k_lle((be128 *)dst, ctx->gf128);
+ }
+
+ while (srclen >= GHASH_BLOCK_SIZE) {
+ crypto_xor(dst, src, GHASH_BLOCK_SIZE);
+ gf128mul_4k_lle((be128 *)dst, ctx->gf128);
+ src += GHASH_BLOCK_SIZE;
+ srclen -= GHASH_BLOCK_SIZE;
+ }
+
+ if (srclen) {
+ dctx->bytes = GHASH_BLOCK_SIZE - srclen;
+ while (srclen--)
+ *dst++ ^= *src++;
+ }
+
+ return 0;
+}
+
+static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
+{
+ u8 *dst = dctx->buffer;
+
+ if (dctx->bytes) {
+ u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
+
+ while (dctx->bytes--)
+ *tmp++ ^= 0;
+
+ gf128mul_4k_lle((be128 *)dst, ctx->gf128);
+ }
+
+ dctx->bytes = 0;
+}
+
+static int ghash_final(struct shash_desc *desc, u8 *dst)
+{
+ struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
+ struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
+ u8 *buf = dctx->buffer;
+
+ if (!ctx->gf128)
+ return -ENOKEY;
+
+ ghash_flush(ctx, dctx);
+ memcpy(dst, buf, GHASH_BLOCK_SIZE);
+
+ return 0;
+}
+
+static void ghash_exit_tfm(struct crypto_tfm *tfm)
+{
+ struct ghash_ctx *ctx = crypto_tfm_ctx(tfm);
+ if (ctx->gf128)
+ gf128mul_free_4k(ctx->gf128);
+}
+
+static struct shash_alg ghash_alg = {
+ .digestsize = GHASH_DIGEST_SIZE,
+ .init = ghash_init,
+ .update = ghash_update,
+ .final = ghash_final,
+ .setkey = ghash_setkey,
+ .descsize = sizeof(struct ghash_desc_ctx),
+ .base = {
+ .cra_name = "ghash",
+ .cra_driver_name = "ghash-generic",
+ .cra_priority = 100,
+ .cra_flags = CRYPTO_ALG_TYPE_SHASH,
+ .cra_blocksize = GHASH_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct ghash_ctx),
+ .cra_module = THIS_MODULE,
+ .cra_exit = ghash_exit_tfm,
+ },
+};
+
+static int __init ghash_mod_init(void)
+{
+ return crypto_register_shash(&ghash_alg);
+}
+
+static void __exit ghash_mod_exit(void)
+{
+ crypto_unregister_shash(&ghash_alg);
+}
+
+module_init(ghash_mod_init);
+module_exit(ghash_mod_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("GHASH Message Digest Algorithm");
+MODULE_ALIAS("ghash");
diff --git a/crypto/hash.c b/crypto/hash.c
deleted file mode 100644
index cb86b19fd10..00000000000
--- a/crypto/hash.c
+++ /dev/null
@@ -1,183 +0,0 @@
-/*
- * Cryptographic Hash operations.
- *
- * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- */
-
-#include <crypto/internal/hash.h>
-#include <linux/errno.h>
-#include <linux/kernel.h>
-#include <linux/module.h>
-#include <linux/slab.h>
-#include <linux/seq_file.h>
-
-#include "internal.h"
-
-static unsigned int crypto_hash_ctxsize(struct crypto_alg *alg, u32 type,
- u32 mask)
-{
- return alg->cra_ctxsize;
-}
-
-static int hash_setkey_unaligned(struct crypto_hash *crt, const u8 *key,
- unsigned int keylen)
-{
- struct crypto_tfm *tfm = crypto_hash_tfm(crt);
- struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
- unsigned long alignmask = crypto_hash_alignmask(crt);
- int ret;
- u8 *buffer, *alignbuffer;
- unsigned long absize;
-
- absize = keylen + alignmask;
- buffer = kmalloc(absize, GFP_ATOMIC);
- if (!buffer)
- return -ENOMEM;
-
- alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
- memcpy(alignbuffer, key, keylen);
- ret = alg->setkey(crt, alignbuffer, keylen);
- memset(alignbuffer, 0, keylen);
- kfree(buffer);
- return ret;
-}
-
-static int hash_setkey(struct crypto_hash *crt, const u8 *key,
- unsigned int keylen)
-{
- struct crypto_tfm *tfm = crypto_hash_tfm(crt);
- struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
- unsigned long alignmask = crypto_hash_alignmask(crt);
-
- if ((unsigned long)key & alignmask)
- return hash_setkey_unaligned(crt, key, keylen);
-
- return alg->setkey(crt, key, keylen);
-}
-
-static int hash_async_setkey(struct crypto_ahash *tfm_async, const u8 *key,
- unsigned int keylen)
-{
- struct crypto_tfm *tfm = crypto_ahash_tfm(tfm_async);
- struct crypto_hash *tfm_hash = __crypto_hash_cast(tfm);
- struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
-
- return alg->setkey(tfm_hash, key, keylen);
-}
-
-static int hash_async_init(struct ahash_request *req)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
- struct hash_desc desc = {
- .tfm = __crypto_hash_cast(tfm),
- .flags = req->base.flags,
- };
-
- return alg->init(&desc);
-}
-
-static int hash_async_update(struct ahash_request *req)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
- struct hash_desc desc = {
- .tfm = __crypto_hash_cast(tfm),
- .flags = req->base.flags,
- };
-
- return alg->update(&desc, req->src, req->nbytes);
-}
-
-static int hash_async_final(struct ahash_request *req)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
- struct hash_desc desc = {
- .tfm = __crypto_hash_cast(tfm),
- .flags = req->base.flags,
- };
-
- return alg->final(&desc, req->result);
-}
-
-static int hash_async_digest(struct ahash_request *req)
-{
- struct crypto_tfm *tfm = req->base.tfm;
- struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
- struct hash_desc desc = {
- .tfm = __crypto_hash_cast(tfm),
- .flags = req->base.flags,
- };
-
- return alg->digest(&desc, req->src, req->nbytes, req->result);
-}
-
-static int crypto_init_hash_ops_async(struct crypto_tfm *tfm)
-{
- struct ahash_tfm *crt = &tfm->crt_ahash;
- struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
-
- crt->init = hash_async_init;
- crt->update = hash_async_update;
- crt->final = hash_async_final;
- crt->digest = hash_async_digest;
- crt->setkey = hash_async_setkey;
- crt->digestsize = alg->digestsize;
-
- return 0;
-}
-
-static int crypto_init_hash_ops_sync(struct crypto_tfm *tfm)
-{
- struct hash_tfm *crt = &tfm->crt_hash;
- struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
-
- crt->init = alg->init;
- crt->update = alg->update;
- crt->final = alg->final;
- crt->digest = alg->digest;
- crt->setkey = hash_setkey;
- crt->digestsize = alg->digestsize;
-
- return 0;
-}
-
-static int crypto_init_hash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
-{
- struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
-
- if (alg->digestsize > PAGE_SIZE / 8)
- return -EINVAL;
-
- if ((mask & CRYPTO_ALG_TYPE_HASH_MASK) != CRYPTO_ALG_TYPE_HASH_MASK)
- return crypto_init_hash_ops_async(tfm);
- else
- return crypto_init_hash_ops_sync(tfm);
-}
-
-static void crypto_hash_show(struct seq_file *m, struct crypto_alg *alg)
- __attribute__ ((unused));
-static void crypto_hash_show(struct seq_file *m, struct crypto_alg *alg)
-{
- seq_printf(m, "type : hash\n");
- seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
- seq_printf(m, "digestsize : %u\n", alg->cra_hash.digestsize);
-}
-
-const struct crypto_type crypto_hash_type = {
- .ctxsize = crypto_hash_ctxsize,
- .init = crypto_init_hash_ops,
-#ifdef CONFIG_PROC_FS
- .show = crypto_hash_show,
-#endif
-};
-EXPORT_SYMBOL_GPL(crypto_hash_type);
-
-MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Generic cryptographic hash type");
diff --git a/crypto/hash_info.c b/crypto/hash_info.c
new file mode 100644
index 00000000000..3e7ff46f26e
--- /dev/null
+++ b/crypto/hash_info.c
@@ -0,0 +1,56 @@
+/*
+ * Hash Info: Hash algorithms information
+ *
+ * Copyright (c) 2013 Dmitry Kasatkin <d.kasatkin@samsung.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <linux/export.h>
+#include <crypto/hash_info.h>
+
+const char *const hash_algo_name[HASH_ALGO__LAST] = {
+ [HASH_ALGO_MD4] = "md4",
+ [HASH_ALGO_MD5] = "md5",
+ [HASH_ALGO_SHA1] = "sha1",
+ [HASH_ALGO_RIPE_MD_160] = "rmd160",
+ [HASH_ALGO_SHA256] = "sha256",
+ [HASH_ALGO_SHA384] = "sha384",
+ [HASH_ALGO_SHA512] = "sha512",
+ [HASH_ALGO_SHA224] = "sha224",
+ [HASH_ALGO_RIPE_MD_128] = "rmd128",
+ [HASH_ALGO_RIPE_MD_256] = "rmd256",
+ [HASH_ALGO_RIPE_MD_320] = "rmd320",
+ [HASH_ALGO_WP_256] = "wp256",
+ [HASH_ALGO_WP_384] = "wp384",
+ [HASH_ALGO_WP_512] = "wp512",
+ [HASH_ALGO_TGR_128] = "tgr128",
+ [HASH_ALGO_TGR_160] = "tgr160",
+ [HASH_ALGO_TGR_192] = "tgr192",
+};
+EXPORT_SYMBOL_GPL(hash_algo_name);
+
+const int hash_digest_size[HASH_ALGO__LAST] = {
+ [HASH_ALGO_MD4] = MD5_DIGEST_SIZE,
+ [HASH_ALGO_MD5] = MD5_DIGEST_SIZE,
+ [HASH_ALGO_SHA1] = SHA1_DIGEST_SIZE,
+ [HASH_ALGO_RIPE_MD_160] = RMD160_DIGEST_SIZE,
+ [HASH_ALGO_SHA256] = SHA256_DIGEST_SIZE,
+ [HASH_ALGO_SHA384] = SHA384_DIGEST_SIZE,
+ [HASH_ALGO_SHA512] = SHA512_DIGEST_SIZE,
+ [HASH_ALGO_SHA224] = SHA224_DIGEST_SIZE,
+ [HASH_ALGO_RIPE_MD_128] = RMD128_DIGEST_SIZE,
+ [HASH_ALGO_RIPE_MD_256] = RMD256_DIGEST_SIZE,
+ [HASH_ALGO_RIPE_MD_320] = RMD320_DIGEST_SIZE,
+ [HASH_ALGO_WP_256] = WP256_DIGEST_SIZE,
+ [HASH_ALGO_WP_384] = WP384_DIGEST_SIZE,
+ [HASH_ALGO_WP_512] = WP512_DIGEST_SIZE,
+ [HASH_ALGO_TGR_128] = TGR128_DIGEST_SIZE,
+ [HASH_ALGO_TGR_160] = TGR160_DIGEST_SIZE,
+ [HASH_ALGO_TGR_192] = TGR192_DIGEST_SIZE,
+};
+EXPORT_SYMBOL_GPL(hash_digest_size);
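
hash_info.c only exports two lookup tables indexed by the HASH_ALGO_* identifiers (MD4 reuses MD5_DIGEST_SIZE because both digests are 16 bytes). A hedged usage sketch; the enum type name hash_algo is assumed from the matching <crypto/hash_info.h> header:

#include <crypto/hash_info.h>
#include <linux/kernel.h>

/* Sketch: map a HASH_ALGO_* id to its crypto API name and digest length. */
static void describe_hash(enum hash_algo algo)
{
        if (algo >= HASH_ALGO__LAST)
                return;

        pr_info("algo %s produces %d-byte digests\n",
                hash_algo_name[algo], hash_digest_size[algo]);
}
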
diff --git a/crypto/hmac.c b/crypto/hmac.c
index 0ad39c37496..8d9544cf816 100644
--- a/crypto/hmac.c
+++ b/crypto/hmac.c
@@ -23,11 +23,10 @@
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
-#include <linux/slab.h>
#include <linux/string.h>
struct hmac_ctx {
- struct crypto_hash *child;
+ struct crypto_shash *hash;
};
static inline void *align_ptr(void *p, unsigned int align)
@@ -35,65 +34,45 @@ static inline void *align_ptr(void *p, unsigned int align)
return (void *)ALIGN((unsigned long)p, align);
}
-static inline struct hmac_ctx *hmac_ctx(struct crypto_hash *tfm)
+static inline struct hmac_ctx *hmac_ctx(struct crypto_shash *tfm)
{
- return align_ptr(crypto_hash_ctx_aligned(tfm) +
- crypto_hash_blocksize(tfm) * 2 +
- crypto_hash_digestsize(tfm), sizeof(void *));
+ return align_ptr(crypto_shash_ctx_aligned(tfm) +
+ crypto_shash_statesize(tfm) * 2,
+ crypto_tfm_ctx_alignment());
}
-static int hmac_setkey(struct crypto_hash *parent,
+static int hmac_setkey(struct crypto_shash *parent,
const u8 *inkey, unsigned int keylen)
{
- int bs = crypto_hash_blocksize(parent);
- int ds = crypto_hash_digestsize(parent);
- char *ipad = crypto_hash_ctx_aligned(parent);
- char *opad = ipad + bs;
- char *digest = opad + bs;
- struct hmac_ctx *ctx = align_ptr(digest + ds, sizeof(void *));
- struct crypto_hash *tfm = ctx->child;
+ int bs = crypto_shash_blocksize(parent);
+ int ds = crypto_shash_digestsize(parent);
+ int ss = crypto_shash_statesize(parent);
+ char *ipad = crypto_shash_ctx_aligned(parent);
+ char *opad = ipad + ss;
+ struct hmac_ctx *ctx = align_ptr(opad + ss,
+ crypto_tfm_ctx_alignment());
+ struct crypto_shash *hash = ctx->hash;
+ struct {
+ struct shash_desc shash;
+ char ctx[crypto_shash_descsize(hash)];
+ } desc;
unsigned int i;
+ desc.shash.tfm = hash;
+ desc.shash.flags = crypto_shash_get_flags(parent) &
+ CRYPTO_TFM_REQ_MAY_SLEEP;
+
if (keylen > bs) {
- struct hash_desc desc;
- struct scatterlist tmp;
- int tmplen;
int err;
- desc.tfm = tfm;
- desc.flags = crypto_hash_get_flags(parent);
- desc.flags &= CRYPTO_TFM_REQ_MAY_SLEEP;
-
- err = crypto_hash_init(&desc);
+ err = crypto_shash_digest(&desc.shash, inkey, keylen, ipad);
if (err)
return err;
- tmplen = bs * 2 + ds;
- sg_init_one(&tmp, ipad, tmplen);
-
- for (; keylen > tmplen; inkey += tmplen, keylen -= tmplen) {
- memcpy(ipad, inkey, tmplen);
- err = crypto_hash_update(&desc, &tmp, tmplen);
- if (err)
- return err;
- }
-
- if (keylen) {
- memcpy(ipad, inkey, keylen);
- err = crypto_hash_update(&desc, &tmp, keylen);
- if (err)
- return err;
- }
-
- err = crypto_hash_final(&desc, digest);
- if (err)
- return err;
-
- inkey = digest;
keylen = ds;
- }
+ } else
+ memcpy(ipad, inkey, keylen);
- memcpy(ipad, inkey, keylen);
memset(ipad + keylen, 0, bs - keylen);
memcpy(opad, ipad, bs);
@@ -102,184 +81,178 @@ static int hmac_setkey(struct crypto_hash *parent,
opad[i] ^= 0x5c;
}
- return 0;
+ return crypto_shash_init(&desc.shash) ?:
+ crypto_shash_update(&desc.shash, ipad, bs) ?:
+ crypto_shash_export(&desc.shash, ipad) ?:
+ crypto_shash_init(&desc.shash) ?:
+ crypto_shash_update(&desc.shash, opad, bs) ?:
+ crypto_shash_export(&desc.shash, opad);
}
-static int hmac_init(struct hash_desc *pdesc)
+static int hmac_export(struct shash_desc *pdesc, void *out)
{
- struct crypto_hash *parent = pdesc->tfm;
- int bs = crypto_hash_blocksize(parent);
- int ds = crypto_hash_digestsize(parent);
- char *ipad = crypto_hash_ctx_aligned(parent);
- struct hmac_ctx *ctx = align_ptr(ipad + bs * 2 + ds, sizeof(void *));
- struct hash_desc desc;
- struct scatterlist tmp;
- int err;
+ struct shash_desc *desc = shash_desc_ctx(pdesc);
- desc.tfm = ctx->child;
- desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
- sg_init_one(&tmp, ipad, bs);
+ desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
- err = crypto_hash_init(&desc);
- if (unlikely(err))
- return err;
-
- return crypto_hash_update(&desc, &tmp, bs);
+ return crypto_shash_export(desc, out);
}
-static int hmac_update(struct hash_desc *pdesc,
- struct scatterlist *sg, unsigned int nbytes)
+static int hmac_import(struct shash_desc *pdesc, const void *in)
{
+ struct shash_desc *desc = shash_desc_ctx(pdesc);
struct hmac_ctx *ctx = hmac_ctx(pdesc->tfm);
- struct hash_desc desc;
- desc.tfm = ctx->child;
- desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
+ desc->tfm = ctx->hash;
+ desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
- return crypto_hash_update(&desc, sg, nbytes);
+ return crypto_shash_import(desc, in);
}
-static int hmac_final(struct hash_desc *pdesc, u8 *out)
+static int hmac_init(struct shash_desc *pdesc)
{
- struct crypto_hash *parent = pdesc->tfm;
- int bs = crypto_hash_blocksize(parent);
- int ds = crypto_hash_digestsize(parent);
- char *opad = crypto_hash_ctx_aligned(parent) + bs;
- char *digest = opad + bs;
- struct hmac_ctx *ctx = align_ptr(digest + ds, sizeof(void *));
- struct hash_desc desc;
- struct scatterlist tmp;
- int err;
+ return hmac_import(pdesc, crypto_shash_ctx_aligned(pdesc->tfm));
+}
- desc.tfm = ctx->child;
- desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
- sg_init_one(&tmp, opad, bs + ds);
+static int hmac_update(struct shash_desc *pdesc,
+ const u8 *data, unsigned int nbytes)
+{
+ struct shash_desc *desc = shash_desc_ctx(pdesc);
- err = crypto_hash_final(&desc, digest);
- if (unlikely(err))
- return err;
+ desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
- return crypto_hash_digest(&desc, &tmp, bs + ds, out);
+ return crypto_shash_update(desc, data, nbytes);
}
-static int hmac_digest(struct hash_desc *pdesc, struct scatterlist *sg,
- unsigned int nbytes, u8 *out)
+static int hmac_final(struct shash_desc *pdesc, u8 *out)
{
- struct crypto_hash *parent = pdesc->tfm;
- int bs = crypto_hash_blocksize(parent);
- int ds = crypto_hash_digestsize(parent);
- char *ipad = crypto_hash_ctx_aligned(parent);
- char *opad = ipad + bs;
- char *digest = opad + bs;
- struct hmac_ctx *ctx = align_ptr(digest + ds, sizeof(void *));
- struct hash_desc desc;
- struct scatterlist sg1[2];
- struct scatterlist sg2[1];
- int err;
+ struct crypto_shash *parent = pdesc->tfm;
+ int ds = crypto_shash_digestsize(parent);
+ int ss = crypto_shash_statesize(parent);
+ char *opad = crypto_shash_ctx_aligned(parent) + ss;
+ struct shash_desc *desc = shash_desc_ctx(pdesc);
- desc.tfm = ctx->child;
- desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
+ desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
- sg_init_table(sg1, 2);
- sg_set_buf(sg1, ipad, bs);
- scatterwalk_sg_chain(sg1, 2, sg);
+ return crypto_shash_final(desc, out) ?:
+ crypto_shash_import(desc, opad) ?:
+ crypto_shash_finup(desc, out, ds, out);
+}
- sg_init_table(sg2, 1);
- sg_set_buf(sg2, opad, bs + ds);
+static int hmac_finup(struct shash_desc *pdesc, const u8 *data,
+ unsigned int nbytes, u8 *out)
+{
- err = crypto_hash_digest(&desc, sg1, nbytes + bs, digest);
- if (unlikely(err))
- return err;
+ struct crypto_shash *parent = pdesc->tfm;
+ int ds = crypto_shash_digestsize(parent);
+ int ss = crypto_shash_statesize(parent);
+ char *opad = crypto_shash_ctx_aligned(parent) + ss;
+ struct shash_desc *desc = shash_desc_ctx(pdesc);
- return crypto_hash_digest(&desc, sg2, bs + ds, out);
+ desc->flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
+
+ return crypto_shash_finup(desc, data, nbytes, out) ?:
+ crypto_shash_import(desc, opad) ?:
+ crypto_shash_finup(desc, out, ds, out);
}
static int hmac_init_tfm(struct crypto_tfm *tfm)
{
- struct crypto_hash *hash;
+ struct crypto_shash *parent = __crypto_shash_cast(tfm);
+ struct crypto_shash *hash;
struct crypto_instance *inst = (void *)tfm->__crt_alg;
- struct crypto_spawn *spawn = crypto_instance_ctx(inst);
- struct hmac_ctx *ctx = hmac_ctx(__crypto_hash_cast(tfm));
+ struct crypto_shash_spawn *spawn = crypto_instance_ctx(inst);
+ struct hmac_ctx *ctx = hmac_ctx(parent);
- hash = crypto_spawn_hash(spawn);
+ hash = crypto_spawn_shash(spawn);
if (IS_ERR(hash))
return PTR_ERR(hash);
- ctx->child = hash;
+ parent->descsize = sizeof(struct shash_desc) +
+ crypto_shash_descsize(hash);
+
+ ctx->hash = hash;
return 0;
}
static void hmac_exit_tfm(struct crypto_tfm *tfm)
{
- struct hmac_ctx *ctx = hmac_ctx(__crypto_hash_cast(tfm));
- crypto_free_hash(ctx->child);
+ struct hmac_ctx *ctx = hmac_ctx(__crypto_shash_cast(tfm));
+ crypto_free_shash(ctx->hash);
}
-static void hmac_free(struct crypto_instance *inst)
+static int hmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
- crypto_drop_spawn(crypto_instance_ctx(inst));
- kfree(inst);
-}
-
-static struct crypto_instance *hmac_alloc(struct rtattr **tb)
-{
- struct crypto_instance *inst;
+ struct shash_instance *inst;
struct crypto_alg *alg;
+ struct shash_alg *salg;
int err;
int ds;
+ int ss;
- err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_HASH);
+ err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
if (err)
- return ERR_PTR(err);
-
- alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_HASH,
- CRYPTO_ALG_TYPE_HASH_MASK);
- if (IS_ERR(alg))
- return ERR_CAST(alg);
-
- inst = ERR_PTR(-EINVAL);
- ds = alg->cra_type == &crypto_hash_type ?
- alg->cra_hash.digestsize :
- alg->cra_type ?
- __crypto_shash_alg(alg)->digestsize :
- alg->cra_digest.dia_digestsize;
- if (ds > alg->cra_blocksize)
+ return err;
+
+ salg = shash_attr_alg(tb[1], 0, 0);
+ if (IS_ERR(salg))
+ return PTR_ERR(salg);
+
+ err = -EINVAL;
+ ds = salg->digestsize;
+ ss = salg->statesize;
+ alg = &salg->base;
+ if (ds > alg->cra_blocksize ||
+ ss < alg->cra_blocksize)
goto out_put_alg;
- inst = crypto_alloc_instance("hmac", alg);
+ inst = shash_alloc_instance("hmac", alg);
+ err = PTR_ERR(inst);
if (IS_ERR(inst))
goto out_put_alg;
- inst->alg.cra_flags = CRYPTO_ALG_TYPE_HASH;
- inst->alg.cra_priority = alg->cra_priority;
- inst->alg.cra_blocksize = alg->cra_blocksize;
- inst->alg.cra_alignmask = alg->cra_alignmask;
- inst->alg.cra_type = &crypto_hash_type;
-
- inst->alg.cra_hash.digestsize = ds;
-
- inst->alg.cra_ctxsize = sizeof(struct hmac_ctx) +
- ALIGN(inst->alg.cra_blocksize * 2 + ds,
- sizeof(void *));
-
- inst->alg.cra_init = hmac_init_tfm;
- inst->alg.cra_exit = hmac_exit_tfm;
-
- inst->alg.cra_hash.init = hmac_init;
- inst->alg.cra_hash.update = hmac_update;
- inst->alg.cra_hash.final = hmac_final;
- inst->alg.cra_hash.digest = hmac_digest;
- inst->alg.cra_hash.setkey = hmac_setkey;
+ err = crypto_init_shash_spawn(shash_instance_ctx(inst), salg,
+ shash_crypto_instance(inst));
+ if (err)
+ goto out_free_inst;
+
+ inst->alg.base.cra_priority = alg->cra_priority;
+ inst->alg.base.cra_blocksize = alg->cra_blocksize;
+ inst->alg.base.cra_alignmask = alg->cra_alignmask;
+
+ ss = ALIGN(ss, alg->cra_alignmask + 1);
+ inst->alg.digestsize = ds;
+ inst->alg.statesize = ss;
+
+ inst->alg.base.cra_ctxsize = sizeof(struct hmac_ctx) +
+ ALIGN(ss * 2, crypto_tfm_ctx_alignment());
+
+ inst->alg.base.cra_init = hmac_init_tfm;
+ inst->alg.base.cra_exit = hmac_exit_tfm;
+
+ inst->alg.init = hmac_init;
+ inst->alg.update = hmac_update;
+ inst->alg.final = hmac_final;
+ inst->alg.finup = hmac_finup;
+ inst->alg.export = hmac_export;
+ inst->alg.import = hmac_import;
+ inst->alg.setkey = hmac_setkey;
+
+ err = shash_register_instance(tmpl, inst);
+ if (err) {
+out_free_inst:
+ shash_free_instance(shash_crypto_instance(inst));
+ }
out_put_alg:
crypto_mod_put(alg);
- return inst;
+ return err;
}
static struct crypto_template hmac_tmpl = {
.name = "hmac",
- .alloc = hmac_alloc,
- .free = hmac_free,
+ .create = hmac_create,
+ .free = shash_free_instance,
.module = THIS_MODULE,
};
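
With this conversion hmac is an shash template: setkey pads the key into ipad/opad, hashes one block of each and stores the exported partial states (which is why statesize must be at least the block size), and init/final simply import those states before continuing. A rough usage sketch; the allocation and digest pattern mirrors the GHASH example above, so only the differing calls are shown:

#include <crypto/hash.h>
#include <linux/err.h>

/* Sketch: obtain a keyed HMAC-SHA256 transform via the shash API. */
static struct crypto_shash *hmac_sha256_tfm(const u8 *key, unsigned int keylen)
{
        struct crypto_shash *tfm;
        int err;

        tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
        if (IS_ERR(tfm))
                return tfm;

        err = crypto_shash_setkey(tfm, key, keylen);
        if (err) {
                crypto_free_shash(tfm);
                return ERR_PTR(err);
        }

        return tfm;
}
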
diff --git a/crypto/internal.h b/crypto/internal.h
index 3c19a27a756..bd39bfc92ea 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -6,7 +6,7 @@
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
+ * Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
@@ -25,12 +25,7 @@
#include <linux/notifier.h>
#include <linux/rwsem.h>
#include <linux/slab.h>
-
-#ifdef CONFIG_CRYPTO_FIPS
-extern int fips_enabled;
-#else
-#define fips_enabled 0
-#endif
+#include <linux/fips.h>
/* Crypto notification events. */
enum {
@@ -65,18 +60,6 @@ static inline void crypto_exit_proc(void)
{ }
#endif
-static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg)
-{
- unsigned int len = alg->cra_ctxsize;
-
- if (alg->cra_alignmask) {
- len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1);
- len += alg->cra_digest.dia_digestsize;
- }
-
- return len;
-}
-
static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg)
{
return alg->cra_ctxsize;
@@ -91,36 +74,40 @@ struct crypto_alg *crypto_mod_get(struct crypto_alg *alg);
struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask);
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask);
-int crypto_init_digest_ops(struct crypto_tfm *tfm);
-int crypto_init_digest_ops_async(struct crypto_tfm *tfm);
int crypto_init_cipher_ops(struct crypto_tfm *tfm);
int crypto_init_compress_ops(struct crypto_tfm *tfm);
-void crypto_exit_digest_ops(struct crypto_tfm *tfm);
void crypto_exit_cipher_ops(struct crypto_tfm *tfm);
void crypto_exit_compress_ops(struct crypto_tfm *tfm);
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask);
void crypto_larval_kill(struct crypto_alg *alg);
struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask);
-void crypto_larval_error(const char *name, u32 type, u32 mask);
void crypto_alg_tested(const char *name, int err);
+void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
+ struct crypto_alg *nalg);
+void crypto_remove_final(struct list_head *list);
void crypto_shoot_alg(struct crypto_alg *alg);
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
u32 mask);
-struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
- const struct crypto_type *frontend);
-
-int crypto_register_instance(struct crypto_template *tmpl,
- struct crypto_instance *inst);
+void *crypto_create_tfm(struct crypto_alg *alg,
+ const struct crypto_type *frontend);
+struct crypto_alg *crypto_find_alg(const char *alg_name,
+ const struct crypto_type *frontend,
+ u32 type, u32 mask);
+void *crypto_alloc_tfm(const char *alg_name,
+ const struct crypto_type *frontend, u32 type, u32 mask);
int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);
int crypto_probing_notify(unsigned long val, void *v);
-int __init testmgr_init(void);
-void testmgr_exit(void);
+static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
+{
+ atomic_inc(&alg->cra_refcnt);
+ return alg;
+}
static inline void crypto_alg_put(struct crypto_alg *alg)
{
diff --git a/crypto/khazad.c b/crypto/khazad.c
index 527e4e395fc..60e7cd66fac 100644
--- a/crypto/khazad.c
+++ b/crypto/khazad.c
@@ -853,7 +853,6 @@ static struct crypto_alg khazad_alg = {
.cra_ctxsize = sizeof (struct khazad_ctx),
.cra_alignmask = 7,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(khazad_alg.cra_list),
.cra_u = { .cipher = {
.cia_min_keysize = KHAZAD_KEY_SIZE,
.cia_max_keysize = KHAZAD_KEY_SIZE,
diff --git a/crypto/krng.c b/crypto/krng.c
index 4328bb3430e..a2d2b72fc13 100644
--- a/crypto/krng.c
+++ b/crypto/krng.c
@@ -35,7 +35,6 @@ static struct crypto_alg krng_alg = {
.cra_ctxsize = 0,
.cra_type = &crypto_rng_type,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(krng_alg.cra_list),
.cra_u = {
.rng = {
.rng_make_random = krng_get_random,
diff --git a/crypto/lrw.c b/crypto/lrw.c
index 358f80be2bf..ba42acc4deb 100644
--- a/crypto/lrw.c
+++ b/crypto/lrw.c
@@ -3,7 +3,7 @@
*
* Copyright (c) 2006 Rik Snel <rsnel@cube.dyndns.org>
*
- * Based om ecb.c
+ * Based on ecb.c
* Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
*
* This program is free software; you can redistribute it and/or modify it
@@ -16,6 +16,7 @@
* http://www.mail-archive.com/stds-p1619@listserv.ieee.org/msg00173.html
*
* The test vectors are included in the testing module tcrypt.[ch] */
+
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
@@ -26,21 +27,11 @@
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
+#include <crypto/lrw.h>
struct priv {
struct crypto_cipher *child;
- /* optimizes multiplying a random (non incrementing, as at the
- * start of a new sector) value with key2, we could also have
- * used 4k optimization tables or no optimization at all. In the
- * latter case we would have to store key2 here */
- struct gf128mul_64k *table;
- /* stores:
- * key2*{ 0,0,...0,0,0,0,1 }, key2*{ 0,0,...0,0,0,1,1 },
- * key2*{ 0,0,...0,0,1,1,1 }, key2*{ 0,0,...0,1,1,1,1 }
- * key2*{ 0,0,...1,1,1,1,1 }, etc
- * needed for optimized multiplication of incrementing values
- * with key2 */
- be128 mulinc[128];
+ struct lrw_table_ctx table;
};
static inline void setbit128_bbe(void *b, int bit)
@@ -54,28 +45,16 @@ static inline void setbit128_bbe(void *b, int bit)
), b);
}
-static int setkey(struct crypto_tfm *parent, const u8 *key,
- unsigned int keylen)
+int lrw_init_table(struct lrw_table_ctx *ctx, const u8 *tweak)
{
- struct priv *ctx = crypto_tfm_ctx(parent);
- struct crypto_cipher *child = ctx->child;
- int err, i;
be128 tmp = { 0 };
- int bsize = crypto_cipher_blocksize(child);
-
- crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
- crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
- CRYPTO_TFM_REQ_MASK);
- if ((err = crypto_cipher_setkey(child, key, keylen - bsize)))
- return err;
- crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
- CRYPTO_TFM_RES_MASK);
+ int i;
if (ctx->table)
gf128mul_free_64k(ctx->table);
/* initialize multiplication table for Key2 */
- ctx->table = gf128mul_init_64k_bbe((be128 *)(key + keylen - bsize));
+ ctx->table = gf128mul_init_64k_bbe((be128 *)tweak);
if (!ctx->table)
return -ENOMEM;
@@ -88,6 +67,34 @@ static int setkey(struct crypto_tfm *parent, const u8 *key,
return 0;
}
+EXPORT_SYMBOL_GPL(lrw_init_table);
+
+void lrw_free_table(struct lrw_table_ctx *ctx)
+{
+ if (ctx->table)
+ gf128mul_free_64k(ctx->table);
+}
+EXPORT_SYMBOL_GPL(lrw_free_table);
+
+static int setkey(struct crypto_tfm *parent, const u8 *key,
+ unsigned int keylen)
+{
+ struct priv *ctx = crypto_tfm_ctx(parent);
+ struct crypto_cipher *child = ctx->child;
+ int err, bsize = LRW_BLOCK_SIZE;
+ const u8 *tweak = key + keylen - bsize;
+
+ crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+ crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
+ CRYPTO_TFM_REQ_MASK);
+ err = crypto_cipher_setkey(child, key, keylen - bsize);
+ if (err)
+ return err;
+ crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
+ CRYPTO_TFM_RES_MASK);
+
+ return lrw_init_table(&ctx->table, tweak);
+}
struct sinfo {
be128 t;
@@ -134,7 +141,7 @@ static int crypt(struct blkcipher_desc *d,
{
int err;
unsigned int avail;
- const int bs = crypto_cipher_blocksize(ctx->child);
+ const int bs = LRW_BLOCK_SIZE;
struct sinfo s = {
.tfm = crypto_cipher_tfm(ctx->child),
.fn = fn
@@ -155,7 +162,7 @@ static int crypt(struct blkcipher_desc *d,
s.t = *iv;
/* T <- I*Key2 */
- gf128mul_64k_bbe(&s.t, ctx->table);
+ gf128mul_64k_bbe(&s.t, ctx->table.table);
goto first;
@@ -163,7 +170,8 @@ static int crypt(struct blkcipher_desc *d,
do {
/* T <- I*Key2, using the optimization
* discussed in the specification */
- be128_xor(&s.t, &s.t, &ctx->mulinc[get_index128(iv)]);
+ be128_xor(&s.t, &s.t,
+ &ctx->table.mulinc[get_index128(iv)]);
inc(iv);
first:
@@ -206,6 +214,85 @@ static int decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
crypto_cipher_alg(ctx->child)->cia_decrypt);
}
+int lrw_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst,
+ struct scatterlist *ssrc, unsigned int nbytes,
+ struct lrw_crypt_req *req)
+{
+ const unsigned int bsize = LRW_BLOCK_SIZE;
+ const unsigned int max_blks = req->tbuflen / bsize;
+ struct lrw_table_ctx *ctx = req->table_ctx;
+ struct blkcipher_walk walk;
+ unsigned int nblocks;
+ be128 *iv, *src, *dst, *t;
+ be128 *t_buf = req->tbuf;
+ int err, i;
+
+ BUG_ON(max_blks < 1);
+
+ blkcipher_walk_init(&walk, sdst, ssrc, nbytes);
+
+ err = blkcipher_walk_virt(desc, &walk);
+ nbytes = walk.nbytes;
+ if (!nbytes)
+ return err;
+
+ nblocks = min(walk.nbytes / bsize, max_blks);
+ src = (be128 *)walk.src.virt.addr;
+ dst = (be128 *)walk.dst.virt.addr;
+
+ /* calculate first value of T */
+ iv = (be128 *)walk.iv;
+ t_buf[0] = *iv;
+
+ /* T <- I*Key2 */
+ gf128mul_64k_bbe(&t_buf[0], ctx->table);
+
+ i = 0;
+ goto first;
+
+ for (;;) {
+ do {
+ for (i = 0; i < nblocks; i++) {
+ /* T <- I*Key2, using the optimization
+ * discussed in the specification */
+ be128_xor(&t_buf[i], t,
+ &ctx->mulinc[get_index128(iv)]);
+ inc(iv);
+first:
+ t = &t_buf[i];
+
+ /* PP <- T xor P */
+ be128_xor(dst + i, t, src + i);
+ }
+
+ /* CC <- E(Key2,PP) */
+ req->crypt_fn(req->crypt_ctx, (u8 *)dst,
+ nblocks * bsize);
+
+ /* C <- T xor CC */
+ for (i = 0; i < nblocks; i++)
+ be128_xor(dst + i, dst + i, &t_buf[i]);
+
+ src += nblocks;
+ dst += nblocks;
+ nbytes -= nblocks * bsize;
+ nblocks = min(nbytes / bsize, max_blks);
+ } while (nblocks > 0);
+
+ err = blkcipher_walk_done(desc, &walk, nbytes);
+ nbytes = walk.nbytes;
+ if (!nbytes)
+ break;
+
+ nblocks = min(nbytes / bsize, max_blks);
+ src = (be128 *)walk.src.virt.addr;
+ dst = (be128 *)walk.dst.virt.addr;
+ }
+
+ return err;
+}
+EXPORT_SYMBOL_GPL(lrw_crypt);
+
static int init_tfm(struct crypto_tfm *tfm)
{
struct crypto_cipher *cipher;
@@ -218,8 +305,9 @@ static int init_tfm(struct crypto_tfm *tfm)
if (IS_ERR(cipher))
return PTR_ERR(cipher);
- if (crypto_cipher_blocksize(cipher) != 16) {
+ if (crypto_cipher_blocksize(cipher) != LRW_BLOCK_SIZE) {
*flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
+ crypto_free_cipher(cipher);
return -EINVAL;
}
@@ -230,8 +318,8 @@ static int init_tfm(struct crypto_tfm *tfm)
static void exit_tfm(struct crypto_tfm *tfm)
{
struct priv *ctx = crypto_tfm_ctx(tfm);
- if (ctx->table)
- gf128mul_free_64k(ctx->table);
+
+ lrw_free_table(&ctx->table);
crypto_free_cipher(ctx->child);
}
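
The LRW rework moves the tweak bookkeeping into struct lrw_table_ctx and exports lrw_init_table/lrw_free_table/lrw_crypt so other (for example SIMD-accelerated) implementations can reuse it while supplying their own bulk block function. A hedged sketch of the crypt_fn contract, with the callback signature inferred from the req->crypt_fn(req->crypt_ctx, (u8 *)dst, nblocks * bsize) call above:

#include <crypto/lrw.h>
#include <linux/crypto.h>

/* Sketch: a crypt_fn callback that encrypts a run of 16-byte blocks in
 * place; a real user would invoke its accelerated cipher here instead. */
static void demo_encrypt_blocks(void *ctx, u8 *blks, unsigned int nbytes)
{
        struct crypto_cipher *cipher = ctx;

        while (nbytes >= LRW_BLOCK_SIZE) {
                crypto_cipher_encrypt_one(cipher, blks, blks);
                blks += LRW_BLOCK_SIZE;
                nbytes -= LRW_BLOCK_SIZE;
        }
}

A caller would then fill a struct lrw_crypt_req with a per-request tweak buffer (tbuf/tbuflen), the shared lrw_table_ctx, this callback plus its context, and hand everything to lrw_crypt().
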
diff --git a/crypto/lz4.c b/crypto/lz4.c
new file mode 100644
index 00000000000..4586dd15b0d
--- /dev/null
+++ b/crypto/lz4.c
@@ -0,0 +1,106 @@
+/*
+ * Cryptographic API.
+ *
+ * Copyright (c) 2013 Chanho Min <chanho.min@lge.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 as published by
+ * the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc., 51
+ * Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ */
+
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/crypto.h>
+#include <linux/vmalloc.h>
+#include <linux/lz4.h>
+
+struct lz4_ctx {
+ void *lz4_comp_mem;
+};
+
+static int lz4_init(struct crypto_tfm *tfm)
+{
+ struct lz4_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ ctx->lz4_comp_mem = vmalloc(LZ4_MEM_COMPRESS);
+ if (!ctx->lz4_comp_mem)
+ return -ENOMEM;
+
+ return 0;
+}
+
+static void lz4_exit(struct crypto_tfm *tfm)
+{
+ struct lz4_ctx *ctx = crypto_tfm_ctx(tfm);
+ vfree(ctx->lz4_comp_mem);
+}
+
+static int lz4_compress_crypto(struct crypto_tfm *tfm, const u8 *src,
+ unsigned int slen, u8 *dst, unsigned int *dlen)
+{
+ struct lz4_ctx *ctx = crypto_tfm_ctx(tfm);
+ size_t tmp_len = *dlen;
+ int err;
+
+ err = lz4_compress(src, slen, dst, &tmp_len, ctx->lz4_comp_mem);
+
+ if (err < 0)
+ return -EINVAL;
+
+ *dlen = tmp_len;
+ return 0;
+}
+
+static int lz4_decompress_crypto(struct crypto_tfm *tfm, const u8 *src,
+ unsigned int slen, u8 *dst, unsigned int *dlen)
+{
+ int err;
+ size_t tmp_len = *dlen;
+ size_t __slen = slen;
+
+ err = lz4_decompress(src, &__slen, dst, tmp_len);
+ if (err < 0)
+ return -EINVAL;
+
+ *dlen = tmp_len;
+ return err;
+}
+
+static struct crypto_alg alg_lz4 = {
+ .cra_name = "lz4",
+ .cra_flags = CRYPTO_ALG_TYPE_COMPRESS,
+ .cra_ctxsize = sizeof(struct lz4_ctx),
+ .cra_module = THIS_MODULE,
+ .cra_list = LIST_HEAD_INIT(alg_lz4.cra_list),
+ .cra_init = lz4_init,
+ .cra_exit = lz4_exit,
+ .cra_u = { .compress = {
+ .coa_compress = lz4_compress_crypto,
+ .coa_decompress = lz4_decompress_crypto } }
+};
+
+static int __init lz4_mod_init(void)
+{
+ return crypto_register_alg(&alg_lz4);
+}
+
+static void __exit lz4_mod_fini(void)
+{
+ crypto_unregister_alg(&alg_lz4);
+}
+
+module_init(lz4_mod_init);
+module_exit(lz4_mod_fini);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("LZ4 Compression Algorithm");
diff --git a/crypto/lz4hc.c b/crypto/lz4hc.c
new file mode 100644
index 00000000000..151ba31d34e
--- /dev/null
+++ b/crypto/lz4hc.c
@@ -0,0 +1,106 @@
+/*
+ * Cryptographic API.
+ *
+ * Copyright (c) 2013 Chanho Min <chanho.min@lge.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 as published by
+ * the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc., 51
+ * Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ */
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/crypto.h>
+#include <linux/vmalloc.h>
+#include <linux/lz4.h>
+
+struct lz4hc_ctx {
+ void *lz4hc_comp_mem;
+};
+
+static int lz4hc_init(struct crypto_tfm *tfm)
+{
+ struct lz4hc_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ ctx->lz4hc_comp_mem = vmalloc(LZ4HC_MEM_COMPRESS);
+ if (!ctx->lz4hc_comp_mem)
+ return -ENOMEM;
+
+ return 0;
+}
+
+static void lz4hc_exit(struct crypto_tfm *tfm)
+{
+ struct lz4hc_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ vfree(ctx->lz4hc_comp_mem);
+}
+
+static int lz4hc_compress_crypto(struct crypto_tfm *tfm, const u8 *src,
+ unsigned int slen, u8 *dst, unsigned int *dlen)
+{
+ struct lz4hc_ctx *ctx = crypto_tfm_ctx(tfm);
+ size_t tmp_len = *dlen;
+ int err;
+
+ err = lz4hc_compress(src, slen, dst, &tmp_len, ctx->lz4hc_comp_mem);
+
+ if (err < 0)
+ return -EINVAL;
+
+ *dlen = tmp_len;
+ return 0;
+}
+
+static int lz4hc_decompress_crypto(struct crypto_tfm *tfm, const u8 *src,
+ unsigned int slen, u8 *dst, unsigned int *dlen)
+{
+ int err;
+ size_t tmp_len = *dlen;
+ size_t __slen = slen;
+
+ err = lz4_decompress(src, &__slen, dst, tmp_len);
+ if (err < 0)
+ return -EINVAL;
+
+ *dlen = tmp_len;
+ return err;
+}
+
+static struct crypto_alg alg_lz4hc = {
+ .cra_name = "lz4hc",
+ .cra_flags = CRYPTO_ALG_TYPE_COMPRESS,
+ .cra_ctxsize = sizeof(struct lz4hc_ctx),
+ .cra_module = THIS_MODULE,
+ .cra_list = LIST_HEAD_INIT(alg_lz4hc.cra_list),
+ .cra_init = lz4hc_init,
+ .cra_exit = lz4hc_exit,
+ .cra_u = { .compress = {
+ .coa_compress = lz4hc_compress_crypto,
+ .coa_decompress = lz4hc_decompress_crypto } }
+};
+
+static int __init lz4hc_mod_init(void)
+{
+ return crypto_register_alg(&alg_lz4hc);
+}
+
+static void __exit lz4hc_mod_fini(void)
+{
+ crypto_unregister_alg(&alg_lz4hc);
+}
+
+module_init(lz4hc_mod_init);
+module_exit(lz4hc_mod_fini);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("LZ4HC Compression Algorithm");
diff --git a/crypto/lzo.c b/crypto/lzo.c
index b5e77077d75..1c2aa69c54b 100644
--- a/crypto/lzo.c
+++ b/crypto/lzo.c
@@ -81,7 +81,6 @@ static struct crypto_alg alg = {
.cra_flags = CRYPTO_ALG_TYPE_COMPRESS,
.cra_ctxsize = sizeof(struct lzo_ctx),
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(alg.cra_list),
.cra_init = lzo_init,
.cra_exit = lzo_exit,
.cra_u = { .compress = {
diff --git a/crypto/md4.c b/crypto/md4.c
index 7fca1f59a4f..0477a6a01d5 100644
--- a/crypto/md4.c
+++ b/crypto/md4.c
@@ -23,6 +23,7 @@
#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/kernel.h>
+#include <linux/module.h>
#include <linux/string.h>
#include <linux/types.h>
#include <asm/byteorder.h>
diff --git a/crypto/md5.c b/crypto/md5.c
index 83eb5296175..7febeaab923 100644
--- a/crypto/md5.c
+++ b/crypto/md5.c
@@ -16,114 +16,14 @@
*
*/
#include <crypto/internal/hash.h>
+#include <crypto/md5.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/types.h>
+#include <linux/cryptohash.h>
#include <asm/byteorder.h>
-#define MD5_DIGEST_SIZE 16
-#define MD5_HMAC_BLOCK_SIZE 64
-#define MD5_BLOCK_WORDS 16
-#define MD5_HASH_WORDS 4
-
-#define F1(x, y, z) (z ^ (x & (y ^ z)))
-#define F2(x, y, z) F1(z, x, y)
-#define F3(x, y, z) (x ^ y ^ z)
-#define F4(x, y, z) (y ^ (x | ~z))
-
-#define MD5STEP(f, w, x, y, z, in, s) \
- (w += f(x, y, z) + in, w = (w<<s | w>>(32-s)) + x)
-
-struct md5_ctx {
- u32 hash[MD5_HASH_WORDS];
- u32 block[MD5_BLOCK_WORDS];
- u64 byte_count;
-};
-
-static void md5_transform(u32 *hash, u32 const *in)
-{
- u32 a, b, c, d;
-
- a = hash[0];
- b = hash[1];
- c = hash[2];
- d = hash[3];
-
- MD5STEP(F1, a, b, c, d, in[0] + 0xd76aa478, 7);
- MD5STEP(F1, d, a, b, c, in[1] + 0xe8c7b756, 12);
- MD5STEP(F1, c, d, a, b, in[2] + 0x242070db, 17);
- MD5STEP(F1, b, c, d, a, in[3] + 0xc1bdceee, 22);
- MD5STEP(F1, a, b, c, d, in[4] + 0xf57c0faf, 7);
- MD5STEP(F1, d, a, b, c, in[5] + 0x4787c62a, 12);
- MD5STEP(F1, c, d, a, b, in[6] + 0xa8304613, 17);
- MD5STEP(F1, b, c, d, a, in[7] + 0xfd469501, 22);
- MD5STEP(F1, a, b, c, d, in[8] + 0x698098d8, 7);
- MD5STEP(F1, d, a, b, c, in[9] + 0x8b44f7af, 12);
- MD5STEP(F1, c, d, a, b, in[10] + 0xffff5bb1, 17);
- MD5STEP(F1, b, c, d, a, in[11] + 0x895cd7be, 22);
- MD5STEP(F1, a, b, c, d, in[12] + 0x6b901122, 7);
- MD5STEP(F1, d, a, b, c, in[13] + 0xfd987193, 12);
- MD5STEP(F1, c, d, a, b, in[14] + 0xa679438e, 17);
- MD5STEP(F1, b, c, d, a, in[15] + 0x49b40821, 22);
-
- MD5STEP(F2, a, b, c, d, in[1] + 0xf61e2562, 5);
- MD5STEP(F2, d, a, b, c, in[6] + 0xc040b340, 9);
- MD5STEP(F2, c, d, a, b, in[11] + 0x265e5a51, 14);
- MD5STEP(F2, b, c, d, a, in[0] + 0xe9b6c7aa, 20);
- MD5STEP(F2, a, b, c, d, in[5] + 0xd62f105d, 5);
- MD5STEP(F2, d, a, b, c, in[10] + 0x02441453, 9);
- MD5STEP(F2, c, d, a, b, in[15] + 0xd8a1e681, 14);
- MD5STEP(F2, b, c, d, a, in[4] + 0xe7d3fbc8, 20);
- MD5STEP(F2, a, b, c, d, in[9] + 0x21e1cde6, 5);
- MD5STEP(F2, d, a, b, c, in[14] + 0xc33707d6, 9);
- MD5STEP(F2, c, d, a, b, in[3] + 0xf4d50d87, 14);
- MD5STEP(F2, b, c, d, a, in[8] + 0x455a14ed, 20);
- MD5STEP(F2, a, b, c, d, in[13] + 0xa9e3e905, 5);
- MD5STEP(F2, d, a, b, c, in[2] + 0xfcefa3f8, 9);
- MD5STEP(F2, c, d, a, b, in[7] + 0x676f02d9, 14);
- MD5STEP(F2, b, c, d, a, in[12] + 0x8d2a4c8a, 20);
-
- MD5STEP(F3, a, b, c, d, in[5] + 0xfffa3942, 4);
- MD5STEP(F3, d, a, b, c, in[8] + 0x8771f681, 11);
- MD5STEP(F3, c, d, a, b, in[11] + 0x6d9d6122, 16);
- MD5STEP(F3, b, c, d, a, in[14] + 0xfde5380c, 23);
- MD5STEP(F3, a, b, c, d, in[1] + 0xa4beea44, 4);
- MD5STEP(F3, d, a, b, c, in[4] + 0x4bdecfa9, 11);
- MD5STEP(F3, c, d, a, b, in[7] + 0xf6bb4b60, 16);
- MD5STEP(F3, b, c, d, a, in[10] + 0xbebfbc70, 23);
- MD5STEP(F3, a, b, c, d, in[13] + 0x289b7ec6, 4);
- MD5STEP(F3, d, a, b, c, in[0] + 0xeaa127fa, 11);
- MD5STEP(F3, c, d, a, b, in[3] + 0xd4ef3085, 16);
- MD5STEP(F3, b, c, d, a, in[6] + 0x04881d05, 23);
- MD5STEP(F3, a, b, c, d, in[9] + 0xd9d4d039, 4);
- MD5STEP(F3, d, a, b, c, in[12] + 0xe6db99e5, 11);
- MD5STEP(F3, c, d, a, b, in[15] + 0x1fa27cf8, 16);
- MD5STEP(F3, b, c, d, a, in[2] + 0xc4ac5665, 23);
-
- MD5STEP(F4, a, b, c, d, in[0] + 0xf4292244, 6);
- MD5STEP(F4, d, a, b, c, in[7] + 0x432aff97, 10);
- MD5STEP(F4, c, d, a, b, in[14] + 0xab9423a7, 15);
- MD5STEP(F4, b, c, d, a, in[5] + 0xfc93a039, 21);
- MD5STEP(F4, a, b, c, d, in[12] + 0x655b59c3, 6);
- MD5STEP(F4, d, a, b, c, in[3] + 0x8f0ccc92, 10);
- MD5STEP(F4, c, d, a, b, in[10] + 0xffeff47d, 15);
- MD5STEP(F4, b, c, d, a, in[1] + 0x85845dd1, 21);
- MD5STEP(F4, a, b, c, d, in[8] + 0x6fa87e4f, 6);
- MD5STEP(F4, d, a, b, c, in[15] + 0xfe2ce6e0, 10);
- MD5STEP(F4, c, d, a, b, in[6] + 0xa3014314, 15);
- MD5STEP(F4, b, c, d, a, in[13] + 0x4e0811a1, 21);
- MD5STEP(F4, a, b, c, d, in[4] + 0xf7537e82, 6);
- MD5STEP(F4, d, a, b, c, in[11] + 0xbd3af235, 10);
- MD5STEP(F4, c, d, a, b, in[2] + 0x2ad7d2bb, 15);
- MD5STEP(F4, b, c, d, a, in[9] + 0xeb86d391, 21);
-
- hash[0] += a;
- hash[1] += b;
- hash[2] += c;
- hash[3] += d;
-}
-
/* XXX: this stuff can be optimized */
static inline void le32_to_cpu_array(u32 *buf, unsigned int words)
{
@@ -141,7 +41,7 @@ static inline void cpu_to_le32_array(u32 *buf, unsigned int words)
}
}
-static inline void md5_transform_helper(struct md5_ctx *ctx)
+static inline void md5_transform_helper(struct md5_state *ctx)
{
le32_to_cpu_array(ctx->block, sizeof(ctx->block) / sizeof(u32));
md5_transform(ctx->hash, ctx->block);
@@ -149,7 +49,7 @@ static inline void md5_transform_helper(struct md5_ctx *ctx)
static int md5_init(struct shash_desc *desc)
{
- struct md5_ctx *mctx = shash_desc_ctx(desc);
+ struct md5_state *mctx = shash_desc_ctx(desc);
mctx->hash[0] = 0x67452301;
mctx->hash[1] = 0xefcdab89;
@@ -162,7 +62,7 @@ static int md5_init(struct shash_desc *desc)
static int md5_update(struct shash_desc *desc, const u8 *data, unsigned int len)
{
- struct md5_ctx *mctx = shash_desc_ctx(desc);
+ struct md5_state *mctx = shash_desc_ctx(desc);
const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
mctx->byte_count += len;
@@ -194,7 +94,7 @@ static int md5_update(struct shash_desc *desc, const u8 *data, unsigned int len)
static int md5_final(struct shash_desc *desc, u8 *out)
{
- struct md5_ctx *mctx = shash_desc_ctx(desc);
+ struct md5_state *mctx = shash_desc_ctx(desc);
const unsigned int offset = mctx->byte_count & 0x3f;
char *p = (char *)mctx->block + offset;
int padding = 56 - (offset + 1);
@@ -220,12 +120,31 @@ static int md5_final(struct shash_desc *desc, u8 *out)
return 0;
}
+static int md5_export(struct shash_desc *desc, void *out)
+{
+ struct md5_state *ctx = shash_desc_ctx(desc);
+
+ memcpy(out, ctx, sizeof(*ctx));
+ return 0;
+}
+
+static int md5_import(struct shash_desc *desc, const void *in)
+{
+ struct md5_state *ctx = shash_desc_ctx(desc);
+
+ memcpy(ctx, in, sizeof(*ctx));
+ return 0;
+}
+
static struct shash_alg alg = {
.digestsize = MD5_DIGEST_SIZE,
.init = md5_init,
.update = md5_update,
.final = md5_final,
- .descsize = sizeof(struct md5_ctx),
+ .export = md5_export,
+ .import = md5_import,
+ .descsize = sizeof(struct md5_state),
+ .statesize = sizeof(struct md5_state),
.base = {
.cra_name = "md5",
.cra_flags = CRYPTO_ALG_TYPE_SHASH,
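
The md5 conversion switches the private md5_ctx to the shared struct md5_state and adds export/import, so a partially hashed state can be saved and resumed later (the same mechanism the new hmac code relies on). A rough sketch of the pattern, assuming desc already belongs to an initialised "md5" transform:

#include <crypto/hash.h>
#include <crypto/md5.h>

/* Sketch: checkpoint an in-progress MD5 and resume it before finishing. */
static int md5_checkpoint_demo(struct shash_desc *desc,
                               const u8 *part1, unsigned int len1,
                               const u8 *part2, unsigned int len2,
                               u8 digest[MD5_DIGEST_SIZE])
{
        u8 state[sizeof(struct md5_state)];
        int err;

        err = crypto_shash_update(desc, part1, len1);
        if (err)
                return err;

        /* Save the partial state... */
        err = crypto_shash_export(desc, state);
        if (err)
                return err;

        /* ...and restore it before finishing, possibly in another context. */
        return crypto_shash_import(desc, state) ?:
               crypto_shash_finup(desc, part2, len2, digest);
}
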
diff --git a/crypto/memneq.c b/crypto/memneq.c
new file mode 100644
index 00000000000..afed1bd16ae
--- /dev/null
+++ b/crypto/memneq.c
@@ -0,0 +1,168 @@
+/*
+ * Constant-time equality testing of memory regions.
+ *
+ * Authors:
+ *
+ * James Yonan <james@openvpn.net>
+ * Daniel Borkmann <dborkman@redhat.com>
+ *
+ * This file is provided under a dual BSD/GPLv2 license. When using or
+ * redistributing this file, you may do so under either license.
+ *
+ * GPL LICENSE SUMMARY
+ *
+ * Copyright(c) 2013 OpenVPN Technologies, Inc. All rights reserved.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of version 2 of the GNU General Public License as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ * The full GNU General Public License is included in this distribution
+ * in the file called LICENSE.GPL.
+ *
+ * BSD LICENSE
+ *
+ * Copyright(c) 2013 OpenVPN Technologies, Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name of OpenVPN Technologies nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <crypto/algapi.h>
+
+#ifndef __HAVE_ARCH_CRYPTO_MEMNEQ
+
+/* Generic path for arbitrary size */
+static inline unsigned long
+__crypto_memneq_generic(const void *a, const void *b, size_t size)
+{
+ unsigned long neq = 0;
+
+#if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)
+ while (size >= sizeof(unsigned long)) {
+ neq |= *(unsigned long *)a ^ *(unsigned long *)b;
+ OPTIMIZER_HIDE_VAR(neq);
+ a += sizeof(unsigned long);
+ b += sizeof(unsigned long);
+ size -= sizeof(unsigned long);
+ }
+#endif /* CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */
+ while (size > 0) {
+ neq |= *(unsigned char *)a ^ *(unsigned char *)b;
+ OPTIMIZER_HIDE_VAR(neq);
+ a += 1;
+ b += 1;
+ size -= 1;
+ }
+ return neq;
+}
+
+/* Loop-free fast-path for frequently used 16-byte size */
+static inline unsigned long __crypto_memneq_16(const void *a, const void *b)
+{
+ unsigned long neq = 0;
+
+#ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
+ if (sizeof(unsigned long) == 8) {
+ neq |= *(unsigned long *)(a) ^ *(unsigned long *)(b);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned long *)(a+8) ^ *(unsigned long *)(b+8);
+ OPTIMIZER_HIDE_VAR(neq);
+ } else if (sizeof(unsigned int) == 4) {
+ neq |= *(unsigned int *)(a) ^ *(unsigned int *)(b);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned int *)(a+4) ^ *(unsigned int *)(b+4);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned int *)(a+8) ^ *(unsigned int *)(b+8);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned int *)(a+12) ^ *(unsigned int *)(b+12);
+ OPTIMIZER_HIDE_VAR(neq);
+ } else
+#endif /* CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS */
+ {
+ neq |= *(unsigned char *)(a) ^ *(unsigned char *)(b);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+1) ^ *(unsigned char *)(b+1);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+2) ^ *(unsigned char *)(b+2);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+3) ^ *(unsigned char *)(b+3);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+4) ^ *(unsigned char *)(b+4);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+5) ^ *(unsigned char *)(b+5);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+6) ^ *(unsigned char *)(b+6);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+7) ^ *(unsigned char *)(b+7);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+8) ^ *(unsigned char *)(b+8);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+9) ^ *(unsigned char *)(b+9);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+10) ^ *(unsigned char *)(b+10);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+11) ^ *(unsigned char *)(b+11);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+12) ^ *(unsigned char *)(b+12);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+13) ^ *(unsigned char *)(b+13);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+14) ^ *(unsigned char *)(b+14);
+ OPTIMIZER_HIDE_VAR(neq);
+ neq |= *(unsigned char *)(a+15) ^ *(unsigned char *)(b+15);
+ OPTIMIZER_HIDE_VAR(neq);
+ }
+
+ return neq;
+}
+
+/* Compare two areas of memory without leaking timing information,
+ * and with special optimizations for common sizes. Users should
+ * not call this function directly, but should instead use
+ * crypto_memneq defined in crypto/algapi.h.
+ */
+noinline unsigned long __crypto_memneq(const void *a, const void *b,
+ size_t size)
+{
+ switch (size) {
+ case 16:
+ return __crypto_memneq_16(a, b);
+ default:
+ return __crypto_memneq_generic(a, b, size);
+ }
+}
+EXPORT_SYMBOL(__crypto_memneq);
+
+#endif /* __HAVE_ARCH_CRYPTO_MEMNEQ */
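
crypto_memneq (declared in crypto/algapi.h, as the comment above notes) is meant to replace memcmp for MAC and tag verification, where an early-exit comparison leaks how many leading bytes of a forgery were correct. A minimal sketch:

#include <crypto/algapi.h>

/* Sketch: constant-time authentication-tag check.  memcmp() may return
 * as soon as a byte differs; crypto_memneq() always touches every byte
 * and only reports whether the regions differ at all. */
static int tag_is_valid(const u8 *computed, const u8 *received)
{
        return !crypto_memneq(computed, received, 16);
}
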
diff --git a/crypto/pcompress.c b/crypto/pcompress.c
new file mode 100644
index 00000000000..7140fe70c7a
--- /dev/null
+++ b/crypto/pcompress.c
@@ -0,0 +1,120 @@
+/*
+ * Cryptographic API.
+ *
+ * Partial (de)compression operations.
+ *
+ * Copyright 2008 Sony Corporation
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; version 2 of the License.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.
+ * If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <linux/crypto.h>
+#include <linux/errno.h>
+#include <linux/module.h>
+#include <linux/seq_file.h>
+#include <linux/string.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
+
+#include <crypto/compress.h>
+#include <crypto/internal/compress.h>
+
+#include "internal.h"
+
+
+static int crypto_pcomp_init(struct crypto_tfm *tfm, u32 type, u32 mask)
+{
+ return 0;
+}
+
+static unsigned int crypto_pcomp_extsize(struct crypto_alg *alg)
+{
+ return alg->cra_ctxsize;
+}
+
+static int crypto_pcomp_init_tfm(struct crypto_tfm *tfm)
+{
+ return 0;
+}
+
+#ifdef CONFIG_NET
+static int crypto_pcomp_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_comp rpcomp;
+
+ strncpy(rpcomp.type, "pcomp", sizeof(rpcomp.type));
+ if (nla_put(skb, CRYPTOCFGA_REPORT_COMPRESS,
+ sizeof(struct crypto_report_comp), &rpcomp))
+ goto nla_put_failure;
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_pcomp_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
+static void crypto_pcomp_show(struct seq_file *m, struct crypto_alg *alg)
+ __attribute__ ((unused));
+static void crypto_pcomp_show(struct seq_file *m, struct crypto_alg *alg)
+{
+ seq_printf(m, "type : pcomp\n");
+}
+
+static const struct crypto_type crypto_pcomp_type = {
+ .extsize = crypto_pcomp_extsize,
+ .init = crypto_pcomp_init,
+ .init_tfm = crypto_pcomp_init_tfm,
+#ifdef CONFIG_PROC_FS
+ .show = crypto_pcomp_show,
+#endif
+ .report = crypto_pcomp_report,
+ .maskclear = ~CRYPTO_ALG_TYPE_MASK,
+ .maskset = CRYPTO_ALG_TYPE_MASK,
+ .type = CRYPTO_ALG_TYPE_PCOMPRESS,
+ .tfmsize = offsetof(struct crypto_pcomp, base),
+};
+
+struct crypto_pcomp *crypto_alloc_pcomp(const char *alg_name, u32 type,
+ u32 mask)
+{
+ return crypto_alloc_tfm(alg_name, &crypto_pcomp_type, type, mask);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_pcomp);
+
+int crypto_register_pcomp(struct pcomp_alg *alg)
+{
+ struct crypto_alg *base = &alg->base;
+
+ base->cra_type = &crypto_pcomp_type;
+ base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
+ base->cra_flags |= CRYPTO_ALG_TYPE_PCOMPRESS;
+
+ return crypto_register_alg(base);
+}
+EXPORT_SYMBOL_GPL(crypto_register_pcomp);
+
+int crypto_unregister_pcomp(struct pcomp_alg *alg)
+{
+ return crypto_unregister_alg(&alg->base);
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_pcomp);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Partial (de)compression type");
+MODULE_AUTHOR("Sony Corporation");
diff --git a/crypto/pcrypt.c b/crypto/pcrypt.c
new file mode 100644
index 00000000000..309d345ead9
--- /dev/null
+++ b/crypto/pcrypt.c
@@ -0,0 +1,567 @@
+/*
+ * pcrypt - Parallel crypto wrapper.
+ *
+ * Copyright (C) 2009 secunet Security Networks AG
+ * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ */
+
+#include <crypto/algapi.h>
+#include <crypto/internal/aead.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/slab.h>
+#include <linux/notifier.h>
+#include <linux/kobject.h>
+#include <linux/cpu.h>
+#include <crypto/pcrypt.h>
+
+struct padata_pcrypt {
+ struct padata_instance *pinst;
+ struct workqueue_struct *wq;
+
+ /*
+ * Cpumask for callback CPUs. It should be
+ * equal to the serial cpumask of the corresponding padata instance,
+ * so it is updated when padata notifies us about a serial
+ * cpumask change.
+ *
+ * cb_cpumask is protected by RCU. This fact prevents us from
+ * using cpumask_var_t directly because the actual type of
+ * cpumask_var_t depends on the kernel configuration (particularly on
+ * the CONFIG_CPUMASK_OFFSTACK macro). Depending on the configuration,
+ * cpumask_var_t may be either a pointer to a struct cpumask
+ * or a variable allocated on the stack. Thus we cannot safely use
+ * cpumask_var_t with RCU operations such as rcu_assign_pointer or
+ * rcu_dereference. So cpumask_var_t is wrapped in struct
+ * pcrypt_cpumask, which makes it possible to use it with RCU.
+ */
+ struct pcrypt_cpumask {
+ cpumask_var_t mask;
+ } *cb_cpumask;
+ struct notifier_block nblock;
+};
+
+static struct padata_pcrypt pencrypt;
+static struct padata_pcrypt pdecrypt;
+static struct kset *pcrypt_kset;
+
+struct pcrypt_instance_ctx {
+ struct crypto_spawn spawn;
+ unsigned int tfm_count;
+};
+
+struct pcrypt_aead_ctx {
+ struct crypto_aead *child;
+ unsigned int cb_cpu;
+};
+
+static int pcrypt_do_parallel(struct padata_priv *padata, unsigned int *cb_cpu,
+ struct padata_pcrypt *pcrypt)
+{
+ unsigned int cpu_index, cpu, i;
+ struct pcrypt_cpumask *cpumask;
+
+ cpu = *cb_cpu;
+
+ rcu_read_lock_bh();
+ cpumask = rcu_dereference_bh(pcrypt->cb_cpumask);
+ if (cpumask_test_cpu(cpu, cpumask->mask))
+ goto out;
+
+ if (!cpumask_weight(cpumask->mask))
+ goto out;
+
+ cpu_index = cpu % cpumask_weight(cpumask->mask);
+
+ cpu = cpumask_first(cpumask->mask);
+ for (i = 0; i < cpu_index; i++)
+ cpu = cpumask_next(cpu, cpumask->mask);
+
+ *cb_cpu = cpu;
+
+out:
+ rcu_read_unlock_bh();
+ return padata_do_parallel(pcrypt->pinst, padata, cpu);
+}
+
+static int pcrypt_aead_setkey(struct crypto_aead *parent,
+ const u8 *key, unsigned int keylen)
+{
+ struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);
+
+ return crypto_aead_setkey(ctx->child, key, keylen);
+}
+
+static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
+ unsigned int authsize)
+{
+ struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);
+
+ return crypto_aead_setauthsize(ctx->child, authsize);
+}
+
+static void pcrypt_aead_serial(struct padata_priv *padata)
+{
+ struct pcrypt_request *preq = pcrypt_padata_request(padata);
+ struct aead_request *req = pcrypt_request_ctx(preq);
+
+ aead_request_complete(req->base.data, padata->info);
+}
+
+static void pcrypt_aead_giv_serial(struct padata_priv *padata)
+{
+ struct pcrypt_request *preq = pcrypt_padata_request(padata);
+ struct aead_givcrypt_request *req = pcrypt_request_ctx(preq);
+
+ aead_request_complete(req->areq.base.data, padata->info);
+}
+
+static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
+{
+ struct aead_request *req = areq->data;
+ struct pcrypt_request *preq = aead_request_ctx(req);
+ struct padata_priv *padata = pcrypt_request_padata(preq);
+
+ padata->info = err;
+ req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+
+ padata_do_serial(padata);
+}
+
+static void pcrypt_aead_enc(struct padata_priv *padata)
+{
+ struct pcrypt_request *preq = pcrypt_padata_request(padata);
+ struct aead_request *req = pcrypt_request_ctx(preq);
+
+ padata->info = crypto_aead_encrypt(req);
+
+ if (padata->info == -EINPROGRESS)
+ return;
+
+ padata_do_serial(padata);
+}
+
+static int pcrypt_aead_encrypt(struct aead_request *req)
+{
+ int err;
+ struct pcrypt_request *preq = aead_request_ctx(req);
+ struct aead_request *creq = pcrypt_request_ctx(preq);
+ struct padata_priv *padata = pcrypt_request_padata(preq);
+ struct crypto_aead *aead = crypto_aead_reqtfm(req);
+ struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
+ u32 flags = aead_request_flags(req);
+
+ memset(padata, 0, sizeof(struct padata_priv));
+
+ padata->parallel = pcrypt_aead_enc;
+ padata->serial = pcrypt_aead_serial;
+
+ aead_request_set_tfm(creq, ctx->child);
+ aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
+ pcrypt_aead_done, req);
+ aead_request_set_crypt(creq, req->src, req->dst,
+ req->cryptlen, req->iv);
+ aead_request_set_assoc(creq, req->assoc, req->assoclen);
+
+ err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt);
+ if (!err)
+ return -EINPROGRESS;
+
+ return err;
+}
+
+static void pcrypt_aead_dec(struct padata_priv *padata)
+{
+ struct pcrypt_request *preq = pcrypt_padata_request(padata);
+ struct aead_request *req = pcrypt_request_ctx(preq);
+
+ padata->info = crypto_aead_decrypt(req);
+
+ if (padata->info == -EINPROGRESS)
+ return;
+
+ padata_do_serial(padata);
+}
+
+static int pcrypt_aead_decrypt(struct aead_request *req)
+{
+ int err;
+ struct pcrypt_request *preq = aead_request_ctx(req);
+ struct aead_request *creq = pcrypt_request_ctx(preq);
+ struct padata_priv *padata = pcrypt_request_padata(preq);
+ struct crypto_aead *aead = crypto_aead_reqtfm(req);
+ struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
+ u32 flags = aead_request_flags(req);
+
+ memset(padata, 0, sizeof(struct padata_priv));
+
+ padata->parallel = pcrypt_aead_dec;
+ padata->serial = pcrypt_aead_serial;
+
+ aead_request_set_tfm(creq, ctx->child);
+ aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
+ pcrypt_aead_done, req);
+ aead_request_set_crypt(creq, req->src, req->dst,
+ req->cryptlen, req->iv);
+ aead_request_set_assoc(creq, req->assoc, req->assoclen);
+
+ err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pdecrypt);
+ if (!err)
+ return -EINPROGRESS;
+
+ return err;
+}
+
+static void pcrypt_aead_givenc(struct padata_priv *padata)
+{
+ struct pcrypt_request *preq = pcrypt_padata_request(padata);
+ struct aead_givcrypt_request *req = pcrypt_request_ctx(preq);
+
+ padata->info = crypto_aead_givencrypt(req);
+
+ if (padata->info == -EINPROGRESS)
+ return;
+
+ padata_do_serial(padata);
+}
+
+static int pcrypt_aead_givencrypt(struct aead_givcrypt_request *req)
+{
+ int err;
+ struct aead_request *areq = &req->areq;
+ struct pcrypt_request *preq = aead_request_ctx(areq);
+ struct aead_givcrypt_request *creq = pcrypt_request_ctx(preq);
+ struct padata_priv *padata = pcrypt_request_padata(preq);
+ struct crypto_aead *aead = aead_givcrypt_reqtfm(req);
+ struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
+ u32 flags = aead_request_flags(areq);
+
+ memset(padata, 0, sizeof(struct padata_priv));
+
+ padata->parallel = pcrypt_aead_givenc;
+ padata->serial = pcrypt_aead_giv_serial;
+
+ aead_givcrypt_set_tfm(creq, ctx->child);
+ aead_givcrypt_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
+ pcrypt_aead_done, areq);
+ aead_givcrypt_set_crypt(creq, areq->src, areq->dst,
+ areq->cryptlen, areq->iv);
+ aead_givcrypt_set_assoc(creq, areq->assoc, areq->assoclen);
+ aead_givcrypt_set_giv(creq, req->giv, req->seq);
+
+ err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt);
+ if (!err)
+ return -EINPROGRESS;
+
+ return err;
+}
+
+static int pcrypt_aead_init_tfm(struct crypto_tfm *tfm)
+{
+ int cpu, cpu_index;
+ struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
+ struct pcrypt_instance_ctx *ictx = crypto_instance_ctx(inst);
+ struct pcrypt_aead_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct crypto_aead *cipher;
+
+ ictx->tfm_count++;
+
+ cpu_index = ictx->tfm_count % cpumask_weight(cpu_online_mask);
+
+ ctx->cb_cpu = cpumask_first(cpu_online_mask);
+ for (cpu = 0; cpu < cpu_index; cpu++)
+ ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);
+
+ cipher = crypto_spawn_aead(crypto_instance_ctx(inst));
+
+ if (IS_ERR(cipher))
+ return PTR_ERR(cipher);
+
+ ctx->child = cipher;
+ tfm->crt_aead.reqsize = sizeof(struct pcrypt_request)
+ + sizeof(struct aead_givcrypt_request)
+ + crypto_aead_reqsize(cipher);
+
+ return 0;
+}
+
+static void pcrypt_aead_exit_tfm(struct crypto_tfm *tfm)
+{
+ struct pcrypt_aead_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ crypto_free_aead(ctx->child);
+}
+
+static struct crypto_instance *pcrypt_alloc_instance(struct crypto_alg *alg)
+{
+ struct crypto_instance *inst;
+ struct pcrypt_instance_ctx *ctx;
+ int err;
+
+ inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
+ if (!inst) {
+ inst = ERR_PTR(-ENOMEM);
+ goto out;
+ }
+
+ err = -ENAMETOOLONG;
+ if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+ "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
+ goto out_free_inst;
+
+ memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
+
+ ctx = crypto_instance_ctx(inst);
+ err = crypto_init_spawn(&ctx->spawn, alg, inst,
+ CRYPTO_ALG_TYPE_MASK);
+ if (err)
+ goto out_free_inst;
+
+ inst->alg.cra_priority = alg->cra_priority + 100;
+ inst->alg.cra_blocksize = alg->cra_blocksize;
+ inst->alg.cra_alignmask = alg->cra_alignmask;
+
+out:
+ return inst;
+
+out_free_inst:
+ kfree(inst);
+ inst = ERR_PTR(err);
+ goto out;
+}
+
+static struct crypto_instance *pcrypt_alloc_aead(struct rtattr **tb,
+ u32 type, u32 mask)
+{
+ struct crypto_instance *inst;
+ struct crypto_alg *alg;
+
+ alg = crypto_get_attr_alg(tb, type, (mask & CRYPTO_ALG_TYPE_MASK));
+ if (IS_ERR(alg))
+ return ERR_CAST(alg);
+
+ inst = pcrypt_alloc_instance(alg);
+ if (IS_ERR(inst))
+ goto out_put_alg;
+
+ inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
+ inst->alg.cra_type = &crypto_aead_type;
+
+ inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
+ inst->alg.cra_aead.geniv = alg->cra_aead.geniv;
+ inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
+
+ inst->alg.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);
+
+ inst->alg.cra_init = pcrypt_aead_init_tfm;
+ inst->alg.cra_exit = pcrypt_aead_exit_tfm;
+
+ inst->alg.cra_aead.setkey = pcrypt_aead_setkey;
+ inst->alg.cra_aead.setauthsize = pcrypt_aead_setauthsize;
+ inst->alg.cra_aead.encrypt = pcrypt_aead_encrypt;
+ inst->alg.cra_aead.decrypt = pcrypt_aead_decrypt;
+ inst->alg.cra_aead.givencrypt = pcrypt_aead_givencrypt;
+
+out_put_alg:
+ crypto_mod_put(alg);
+ return inst;
+}
+
+static struct crypto_instance *pcrypt_alloc(struct rtattr **tb)
+{
+ struct crypto_attr_type *algt;
+
+ algt = crypto_get_attr_type(tb);
+ if (IS_ERR(algt))
+ return ERR_CAST(algt);
+
+ switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
+ case CRYPTO_ALG_TYPE_AEAD:
+ return pcrypt_alloc_aead(tb, algt->type, algt->mask);
+ }
+
+ return ERR_PTR(-EINVAL);
+}
+
+static void pcrypt_free(struct crypto_instance *inst)
+{
+ struct pcrypt_instance_ctx *ctx = crypto_instance_ctx(inst);
+
+ crypto_drop_spawn(&ctx->spawn);
+ kfree(inst);
+}
+
+static int pcrypt_cpumask_change_notify(struct notifier_block *self,
+ unsigned long val, void *data)
+{
+ struct padata_pcrypt *pcrypt;
+ struct pcrypt_cpumask *new_mask, *old_mask;
+ struct padata_cpumask *cpumask = (struct padata_cpumask *)data;
+
+ if (!(val & PADATA_CPU_SERIAL))
+ return 0;
+
+ pcrypt = container_of(self, struct padata_pcrypt, nblock);
+ new_mask = kmalloc(sizeof(*new_mask), GFP_KERNEL);
+ if (!new_mask)
+ return -ENOMEM;
+ if (!alloc_cpumask_var(&new_mask->mask, GFP_KERNEL)) {
+ kfree(new_mask);
+ return -ENOMEM;
+ }
+
+ old_mask = pcrypt->cb_cpumask;
+
+ cpumask_copy(new_mask->mask, cpumask->cbcpu);
+ rcu_assign_pointer(pcrypt->cb_cpumask, new_mask);
+ synchronize_rcu_bh();
+
+ free_cpumask_var(old_mask->mask);
+ kfree(old_mask);
+ return 0;
+}
+
+static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
+{
+ int ret;
+
+ pinst->kobj.kset = pcrypt_kset;
+ ret = kobject_add(&pinst->kobj, NULL, name);
+ if (!ret)
+ kobject_uevent(&pinst->kobj, KOBJ_ADD);
+
+ return ret;
+}
+
+static int pcrypt_init_padata(struct padata_pcrypt *pcrypt,
+ const char *name)
+{
+ int ret = -ENOMEM;
+ struct pcrypt_cpumask *mask;
+
+ get_online_cpus();
+
+ pcrypt->wq = alloc_workqueue("%s", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
+ 1, name);
+ if (!pcrypt->wq)
+ goto err;
+
+ pcrypt->pinst = padata_alloc_possible(pcrypt->wq);
+ if (!pcrypt->pinst)
+ goto err_destroy_workqueue;
+
+ mask = kmalloc(sizeof(*mask), GFP_KERNEL);
+ if (!mask)
+ goto err_free_padata;
+ if (!alloc_cpumask_var(&mask->mask, GFP_KERNEL)) {
+ kfree(mask);
+ goto err_free_padata;
+ }
+
+ cpumask_and(mask->mask, cpu_possible_mask, cpu_online_mask);
+ rcu_assign_pointer(pcrypt->cb_cpumask, mask);
+
+ pcrypt->nblock.notifier_call = pcrypt_cpumask_change_notify;
+ ret = padata_register_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
+ if (ret)
+ goto err_free_cpumask;
+
+ ret = pcrypt_sysfs_add(pcrypt->pinst, name);
+ if (ret)
+ goto err_unregister_notifier;
+
+ put_online_cpus();
+
+ return ret;
+
+err_unregister_notifier:
+ padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
+err_free_cpumask:
+ free_cpumask_var(mask->mask);
+ kfree(mask);
+err_free_padata:
+ padata_free(pcrypt->pinst);
+err_destroy_workqueue:
+ destroy_workqueue(pcrypt->wq);
+err:
+ put_online_cpus();
+
+ return ret;
+}
+
+static void pcrypt_fini_padata(struct padata_pcrypt *pcrypt)
+{
+ free_cpumask_var(pcrypt->cb_cpumask->mask);
+ kfree(pcrypt->cb_cpumask);
+
+ padata_stop(pcrypt->pinst);
+ padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
+ destroy_workqueue(pcrypt->wq);
+ padata_free(pcrypt->pinst);
+}
+
+static struct crypto_template pcrypt_tmpl = {
+ .name = "pcrypt",
+ .alloc = pcrypt_alloc,
+ .free = pcrypt_free,
+ .module = THIS_MODULE,
+};
+
+static int __init pcrypt_init(void)
+{
+ int err = -ENOMEM;
+
+ pcrypt_kset = kset_create_and_add("pcrypt", NULL, kernel_kobj);
+ if (!pcrypt_kset)
+ goto err;
+
+ err = pcrypt_init_padata(&pencrypt, "pencrypt");
+ if (err)
+ goto err_unreg_kset;
+
+ err = pcrypt_init_padata(&pdecrypt, "pdecrypt");
+ if (err)
+ goto err_deinit_pencrypt;
+
+ padata_start(pencrypt.pinst);
+ padata_start(pdecrypt.pinst);
+
+ return crypto_register_template(&pcrypt_tmpl);
+
+err_deinit_pencrypt:
+ pcrypt_fini_padata(&pencrypt);
+err_unreg_kset:
+ kset_unregister(pcrypt_kset);
+err:
+ return err;
+}
+
+static void __exit pcrypt_exit(void)
+{
+ pcrypt_fini_padata(&pencrypt);
+ pcrypt_fini_padata(&pdecrypt);
+
+ kset_unregister(pcrypt_kset);
+ crypto_unregister_template(&pcrypt_tmpl);
+}
+
+module_init(pcrypt_init);
+module_exit(pcrypt_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
+MODULE_DESCRIPTION("Parallel crypto wrapper");
diff --git a/crypto/proc.c b/crypto/proc.c
index 5dc07e442fc..4a0a7aad220 100644
--- a/crypto/proc.c
+++ b/crypto/proc.c
@@ -13,9 +13,10 @@
*
*/
-#include <asm/atomic.h>
+#include <linux/atomic.h>
#include <linux/init.h>
#include <linux/crypto.h>
+#include <linux/module.h> /* for module_name() */
#include <linux/rwsem.h>
#include <linux/proc_fs.h>
#include <linux/seq_file.h>
@@ -25,28 +26,22 @@
#ifdef CONFIG_CRYPTO_FIPS
static struct ctl_table crypto_sysctl_table[] = {
{
- .ctl_name = CTL_UNNUMBERED,
.procname = "fips_enabled",
.data = &fips_enabled,
.maxlen = sizeof(int),
.mode = 0444,
- .proc_handler = &proc_dointvec
- },
- {
- .ctl_name = 0,
+ .proc_handler = proc_dointvec
},
+ {}
};
static struct ctl_table crypto_dir_table[] = {
{
- .ctl_name = CTL_UNNUMBERED,
.procname = "crypto",
.mode = 0555,
.child = crypto_sysctl_table
},
- {
- .ctl_name = 0,
- },
+ {}
};
static struct ctl_table_header *crypto_sysctls;
@@ -115,13 +110,6 @@ static int c_show(struct seq_file *m, void *p)
seq_printf(m, "max keysize : %u\n",
alg->cra_cipher.cia_max_keysize);
break;
-
- case CRYPTO_ALG_TYPE_DIGEST:
- seq_printf(m, "type : digest\n");
- seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
- seq_printf(m, "digestsize : %u\n",
- alg->cra_digest.dia_digestsize);
- break;
case CRYPTO_ALG_TYPE_COMPRESS:
seq_printf(m, "type : compression\n");
break;
diff --git a/crypto/rmd128.c b/crypto/rmd128.c
index 1ceb6735aa5..8a0f68b7f25 100644
--- a/crypto/rmd128.c
+++ b/crypto/rmd128.c
@@ -5,7 +5,7 @@
*
* Based on the reference implementation by Antoon Bosselaers, ESAT-COSIC
*
- * Copyright (c) 2008 Adrian-Ken Rueegsegger <rueegsegger (at) swiss-it.ch>
+ * Copyright (c) 2008 Adrian-Ken Rueegsegger <ken@codelabs.ch>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
@@ -325,4 +325,5 @@ module_init(rmd128_mod_init);
module_exit(rmd128_mod_fini);
MODULE_LICENSE("GPL");
+MODULE_AUTHOR("Adrian-Ken Rueegsegger <ken@codelabs.ch>");
MODULE_DESCRIPTION("RIPEMD-128 Message Digest");
diff --git a/crypto/rmd160.c b/crypto/rmd160.c
index 472261fc913..525d7bb752c 100644
--- a/crypto/rmd160.c
+++ b/crypto/rmd160.c
@@ -5,7 +5,7 @@
*
* Based on the reference implementation by Antoon Bosselaers, ESAT-COSIC
*
- * Copyright (c) 2008 Adrian-Ken Rueegsegger <rueegsegger (at) swiss-it.ch>
+ * Copyright (c) 2008 Adrian-Ken Rueegsegger <ken@codelabs.ch>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
@@ -369,4 +369,5 @@ module_init(rmd160_mod_init);
module_exit(rmd160_mod_fini);
MODULE_LICENSE("GPL");
+MODULE_AUTHOR("Adrian-Ken Rueegsegger <ken@codelabs.ch>");
MODULE_DESCRIPTION("RIPEMD-160 Message Digest");
diff --git a/crypto/rmd256.c b/crypto/rmd256.c
index 72eafa8d2e7..69293d9b56e 100644
--- a/crypto/rmd256.c
+++ b/crypto/rmd256.c
@@ -5,7 +5,7 @@
*
* Based on the reference implementation by Antoon Bosselaers, ESAT-COSIC
*
- * Copyright (c) 2008 Adrian-Ken Rueegsegger <rueegsegger (at) swiss-it.ch>
+ * Copyright (c) 2008 Adrian-Ken Rueegsegger <ken@codelabs.ch>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
@@ -344,4 +344,5 @@ module_init(rmd256_mod_init);
module_exit(rmd256_mod_fini);
MODULE_LICENSE("GPL");
+MODULE_AUTHOR("Adrian-Ken Rueegsegger <ken@codelabs.ch>");
MODULE_DESCRIPTION("RIPEMD-256 Message Digest");
diff --git a/crypto/rmd320.c b/crypto/rmd320.c
index 86becaba2f0..09f97dfdfbb 100644
--- a/crypto/rmd320.c
+++ b/crypto/rmd320.c
@@ -5,7 +5,7 @@
*
* Based on the reference implementation by Antoon Bosselaers, ESAT-COSIC
*
- * Copyright (c) 2008 Adrian-Ken Rueegsegger <rueegsegger (at) swiss-it.ch>
+ * Copyright (c) 2008 Adrian-Ken Rueegsegger <ken@codelabs.ch>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
@@ -393,4 +393,5 @@ module_init(rmd320_mod_init);
module_exit(rmd320_mod_fini);
MODULE_LICENSE("GPL");
+MODULE_AUTHOR("Adrian-Ken Rueegsegger <ken@codelabs.ch>");
MODULE_DESCRIPTION("RIPEMD-320 Message Digest");
diff --git a/crypto/rng.c b/crypto/rng.c
index 6e94bc73557..e0a25c2456d 100644
--- a/crypto/rng.c
+++ b/crypto/rng.c
@@ -12,14 +12,17 @@
*
*/
-#include <asm/atomic.h>
+#include <linux/atomic.h>
#include <crypto/internal/rng.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/mutex.h>
#include <linux/random.h>
#include <linux/seq_file.h>
+#include <linux/slab.h>
#include <linux/string.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
static DEFINE_MUTEX(crypto_default_rng_lock);
struct crypto_rng *crypto_default_rng;
@@ -57,6 +60,30 @@ static int crypto_init_rng_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
return 0;
}
+#ifdef CONFIG_NET
+static int crypto_rng_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_rng rrng;
+
+ strncpy(rrng.type, "rng", sizeof(rrng.type));
+
+ rrng.seedsize = alg->cra_rng.seedsize;
+
+ if (nla_put(skb, CRYPTOCFGA_REPORT_RNG,
+ sizeof(struct crypto_report_rng), &rrng))
+ goto nla_put_failure;
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_rng_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_rng_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_rng_show(struct seq_file *m, struct crypto_alg *alg)
@@ -77,6 +104,7 @@ const struct crypto_type crypto_rng_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_rng_show,
#endif
+ .report = crypto_rng_report,
};
EXPORT_SYMBOL_GPL(crypto_rng_type);
@@ -123,4 +151,4 @@ void crypto_put_default_rng(void)
EXPORT_SYMBOL_GPL(crypto_put_default_rng);
MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Random Number Genertor");
+MODULE_DESCRIPTION("Random Number Generator");
diff --git a/crypto/salsa20_generic.c b/crypto/salsa20_generic.c
index eac10c11685..9a4770c0228 100644
--- a/crypto/salsa20_generic.c
+++ b/crypto/salsa20_generic.c
@@ -221,7 +221,6 @@ static struct crypto_alg alg = {
.cra_ctxsize = sizeof(struct salsa20_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(alg.cra_list),
.cra_u = {
.blkcipher = {
.setkey = setkey,
diff --git a/crypto/scatterwalk.c b/crypto/scatterwalk.c
index 3de89a42440..79ca2278c2a 100644
--- a/crypto/scatterwalk.c
+++ b/crypto/scatterwalk.c
@@ -40,9 +40,9 @@ void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg)
}
EXPORT_SYMBOL_GPL(scatterwalk_start);
-void *scatterwalk_map(struct scatter_walk *walk, int out)
+void *scatterwalk_map(struct scatter_walk *walk)
{
- return crypto_kmap(scatterwalk_page(walk), out) +
+ return kmap_atomic(scatterwalk_page(walk)) +
offset_in_page(walk->offset);
}
EXPORT_SYMBOL_GPL(scatterwalk_map);
@@ -68,7 +68,7 @@ static void scatterwalk_pagedone(struct scatter_walk *walk, int out,
void scatterwalk_done(struct scatter_walk *walk, int out, int more)
{
- if (!offset_in_page(walk->offset) || !more)
+ if (!(scatterwalk_pagelen(walk) & (PAGE_SIZE - 1)) || !more)
scatterwalk_pagedone(walk, out, more);
}
EXPORT_SYMBOL_GPL(scatterwalk_done);
@@ -83,9 +83,9 @@ void scatterwalk_copychunks(void *buf, struct scatter_walk *walk,
if (len_this_page > nbytes)
len_this_page = nbytes;
- vaddr = scatterwalk_map(walk, out);
+ vaddr = scatterwalk_map(walk);
memcpy_dir(buf, vaddr, len_this_page, out);
- scatterwalk_unmap(vaddr, out);
+ scatterwalk_unmap(vaddr);
scatterwalk_advance(walk, len_this_page);
@@ -124,3 +124,25 @@ void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
scatterwalk_done(&walk, out, 0);
}
EXPORT_SYMBOL_GPL(scatterwalk_map_and_copy);
+
+int scatterwalk_bytes_sglen(struct scatterlist *sg, int num_bytes)
+{
+ int offset = 0, n = 0;
+
+ /* num_bytes is too small */
+ if (num_bytes < sg->length)
+ return -1;
+
+ do {
+ offset += sg->length;
+ n++;
+ sg = scatterwalk_sg_next(sg);
+
+ /* num_bytes is too large */
+ if (unlikely(!sg && (num_bytes < offset)))
+ return -1;
+ } while (sg && (num_bytes > offset));
+
+ return n;
+}
+EXPORT_SYMBOL_GPL(scatterwalk_bytes_sglen);
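[Editor's note: a caller would typically use the new helper to size a scatterlist before operating on it; a hypothetical fragment, where src and nbytes are illustrative names.]

int nents;

/* How many scatterlist entries cover the first nbytes of src?
 * A negative return means the byte count does not map onto the list. */
nents = scatterwalk_bytes_sglen(src, nbytes);
if (nents < 0)
	return -EINVAL;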
diff --git a/crypto/seed.c b/crypto/seed.c
index d3e422f6055..9c904d6d215 100644
--- a/crypto/seed.c
+++ b/crypto/seed.c
@@ -449,7 +449,6 @@ static struct crypto_alg seed_alg = {
.cra_ctxsize = sizeof(struct seed_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(seed_alg.cra_list),
.cra_u = {
.cipher = {
.cia_min_keysize = SEED_KEY_SIZE,
diff --git a/crypto/seqiv.c b/crypto/seqiv.c
index 5a013a8bf87..f2cba4ed6f2 100644
--- a/crypto/seqiv.c
+++ b/crypto/seqiv.c
@@ -20,6 +20,7 @@
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
+#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/string.h>
@@ -304,9 +305,8 @@ static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
int err;
algt = crypto_get_attr_type(tb);
- err = PTR_ERR(algt);
if (IS_ERR(algt))
- return ERR_PTR(err);
+ return ERR_CAST(algt);
err = crypto_get_default_rng();
if (err)
diff --git a/crypto/serpent.c b/crypto/serpent.c
deleted file mode 100644
index b651a55fa56..00000000000
--- a/crypto/serpent.c
+++ /dev/null
@@ -1,587 +0,0 @@
-/*
- * Cryptographic API.
- *
- * Serpent Cipher Algorithm.
- *
- * Copyright (C) 2002 Dag Arne Osvik <osvik@ii.uib.no>
- * 2003 Herbert Valerio Riedel <hvr@gnu.org>
- *
- * Added tnepres support: Ruben Jesus Garcia Hernandez <ruben@ugr.es>, 18.10.2004
- * Based on code by hvr
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- */
-
-#include <linux/init.h>
-#include <linux/module.h>
-#include <linux/errno.h>
-#include <asm/byteorder.h>
-#include <linux/crypto.h>
-#include <linux/types.h>
-
-/* Key is padded to the maximum of 256 bits before round key generation.
- * Any key length <= 256 bits (32 bytes) is allowed by the algorithm.
- */
-
-#define SERPENT_MIN_KEY_SIZE 0
-#define SERPENT_MAX_KEY_SIZE 32
-#define SERPENT_EXPKEY_WORDS 132
-#define SERPENT_BLOCK_SIZE 16
-
-#define PHI 0x9e3779b9UL
-
-#define keyiter(a,b,c,d,i,j) \
- b ^= d; b ^= c; b ^= a; b ^= PHI ^ i; b = rol32(b,11); k[j] = b;
-
-#define loadkeys(x0,x1,x2,x3,i) \
- x0=k[i]; x1=k[i+1]; x2=k[i+2]; x3=k[i+3];
-
-#define storekeys(x0,x1,x2,x3,i) \
- k[i]=x0; k[i+1]=x1; k[i+2]=x2; k[i+3]=x3;
-
-#define K(x0,x1,x2,x3,i) \
- x3 ^= k[4*(i)+3]; x2 ^= k[4*(i)+2]; \
- x1 ^= k[4*(i)+1]; x0 ^= k[4*(i)+0];
-
-#define LK(x0,x1,x2,x3,x4,i) \
- x0=rol32(x0,13);\
- x2=rol32(x2,3); x1 ^= x0; x4 = x0 << 3; \
- x3 ^= x2; x1 ^= x2; \
- x1=rol32(x1,1); x3 ^= x4; \
- x3=rol32(x3,7); x4 = x1; \
- x0 ^= x1; x4 <<= 7; x2 ^= x3; \
- x0 ^= x3; x2 ^= x4; x3 ^= k[4*i+3]; \
- x1 ^= k[4*i+1]; x0=rol32(x0,5); x2=rol32(x2,22);\
- x0 ^= k[4*i+0]; x2 ^= k[4*i+2];
-
-#define KL(x0,x1,x2,x3,x4,i) \
- x0 ^= k[4*i+0]; x1 ^= k[4*i+1]; x2 ^= k[4*i+2]; \
- x3 ^= k[4*i+3]; x0=ror32(x0,5); x2=ror32(x2,22);\
- x4 = x1; x2 ^= x3; x0 ^= x3; \
- x4 <<= 7; x0 ^= x1; x1=ror32(x1,1); \
- x2 ^= x4; x3=ror32(x3,7); x4 = x0 << 3; \
- x1 ^= x0; x3 ^= x4; x0=ror32(x0,13);\
- x1 ^= x2; x3 ^= x2; x2=ror32(x2,3);
-
-#define S0(x0,x1,x2,x3,x4) \
- x4 = x3; \
- x3 |= x0; x0 ^= x4; x4 ^= x2; \
- x4 =~ x4; x3 ^= x1; x1 &= x0; \
- x1 ^= x4; x2 ^= x0; x0 ^= x3; \
- x4 |= x0; x0 ^= x2; x2 &= x1; \
- x3 ^= x2; x1 =~ x1; x2 ^= x4; \
- x1 ^= x2;
-
-#define S1(x0,x1,x2,x3,x4) \
- x4 = x1; \
- x1 ^= x0; x0 ^= x3; x3 =~ x3; \
- x4 &= x1; x0 |= x1; x3 ^= x2; \
- x0 ^= x3; x1 ^= x3; x3 ^= x4; \
- x1 |= x4; x4 ^= x2; x2 &= x0; \
- x2 ^= x1; x1 |= x0; x0 =~ x0; \
- x0 ^= x2; x4 ^= x1;
-
-#define S2(x0,x1,x2,x3,x4) \
- x3 =~ x3; \
- x1 ^= x0; x4 = x0; x0 &= x2; \
- x0 ^= x3; x3 |= x4; x2 ^= x1; \
- x3 ^= x1; x1 &= x0; x0 ^= x2; \
- x2 &= x3; x3 |= x1; x0 =~ x0; \
- x3 ^= x0; x4 ^= x0; x0 ^= x2; \
- x1 |= x2;
-
-#define S3(x0,x1,x2,x3,x4) \
- x4 = x1; \
- x1 ^= x3; x3 |= x0; x4 &= x0; \
- x0 ^= x2; x2 ^= x1; x1 &= x3; \
- x2 ^= x3; x0 |= x4; x4 ^= x3; \
- x1 ^= x0; x0 &= x3; x3 &= x4; \
- x3 ^= x2; x4 |= x1; x2 &= x1; \
- x4 ^= x3; x0 ^= x3; x3 ^= x2;
-
-#define S4(x0,x1,x2,x3,x4) \
- x4 = x3; \
- x3 &= x0; x0 ^= x4; \
- x3 ^= x2; x2 |= x4; x0 ^= x1; \
- x4 ^= x3; x2 |= x0; \
- x2 ^= x1; x1 &= x0; \
- x1 ^= x4; x4 &= x2; x2 ^= x3; \
- x4 ^= x0; x3 |= x1; x1 =~ x1; \
- x3 ^= x0;
-
-#define S5(x0,x1,x2,x3,x4) \
- x4 = x1; x1 |= x0; \
- x2 ^= x1; x3 =~ x3; x4 ^= x0; \
- x0 ^= x2; x1 &= x4; x4 |= x3; \
- x4 ^= x0; x0 &= x3; x1 ^= x3; \
- x3 ^= x2; x0 ^= x1; x2 &= x4; \
- x1 ^= x2; x2 &= x0; \
- x3 ^= x2;
-
-#define S6(x0,x1,x2,x3,x4) \
- x4 = x1; \
- x3 ^= x0; x1 ^= x2; x2 ^= x0; \
- x0 &= x3; x1 |= x3; x4 =~ x4; \
- x0 ^= x1; x1 ^= x2; \
- x3 ^= x4; x4 ^= x0; x2 &= x0; \
- x4 ^= x1; x2 ^= x3; x3 &= x1; \
- x3 ^= x0; x1 ^= x2;
-
-#define S7(x0,x1,x2,x3,x4) \
- x1 =~ x1; \
- x4 = x1; x0 =~ x0; x1 &= x2; \
- x1 ^= x3; x3 |= x4; x4 ^= x2; \
- x2 ^= x3; x3 ^= x0; x0 |= x1; \
- x2 &= x0; x0 ^= x4; x4 ^= x3; \
- x3 &= x0; x4 ^= x1; \
- x2 ^= x4; x3 ^= x1; x4 |= x0; \
- x4 ^= x1;
-
-#define SI0(x0,x1,x2,x3,x4) \
- x4 = x3; x1 ^= x0; \
- x3 |= x1; x4 ^= x1; x0 =~ x0; \
- x2 ^= x3; x3 ^= x0; x0 &= x1; \
- x0 ^= x2; x2 &= x3; x3 ^= x4; \
- x2 ^= x3; x1 ^= x3; x3 &= x0; \
- x1 ^= x0; x0 ^= x2; x4 ^= x3;
-
-#define SI1(x0,x1,x2,x3,x4) \
- x1 ^= x3; x4 = x0; \
- x0 ^= x2; x2 =~ x2; x4 |= x1; \
- x4 ^= x3; x3 &= x1; x1 ^= x2; \
- x2 &= x4; x4 ^= x1; x1 |= x3; \
- x3 ^= x0; x2 ^= x0; x0 |= x4; \
- x2 ^= x4; x1 ^= x0; \
- x4 ^= x1;
-
-#define SI2(x0,x1,x2,x3,x4) \
- x2 ^= x1; x4 = x3; x3 =~ x3; \
- x3 |= x2; x2 ^= x4; x4 ^= x0; \
- x3 ^= x1; x1 |= x2; x2 ^= x0; \
- x1 ^= x4; x4 |= x3; x2 ^= x3; \
- x4 ^= x2; x2 &= x1; \
- x2 ^= x3; x3 ^= x4; x4 ^= x0;
-
-#define SI3(x0,x1,x2,x3,x4) \
- x2 ^= x1; \
- x4 = x1; x1 &= x2; \
- x1 ^= x0; x0 |= x4; x4 ^= x3; \
- x0 ^= x3; x3 |= x1; x1 ^= x2; \
- x1 ^= x3; x0 ^= x2; x2 ^= x3; \
- x3 &= x1; x1 ^= x0; x0 &= x2; \
- x4 ^= x3; x3 ^= x0; x0 ^= x1;
-
-#define SI4(x0,x1,x2,x3,x4) \
- x2 ^= x3; x4 = x0; x0 &= x1; \
- x0 ^= x2; x2 |= x3; x4 =~ x4; \
- x1 ^= x0; x0 ^= x2; x2 &= x4; \
- x2 ^= x0; x0 |= x4; \
- x0 ^= x3; x3 &= x2; \
- x4 ^= x3; x3 ^= x1; x1 &= x0; \
- x4 ^= x1; x0 ^= x3;
-
-#define SI5(x0,x1,x2,x3,x4) \
- x4 = x1; x1 |= x2; \
- x2 ^= x4; x1 ^= x3; x3 &= x4; \
- x2 ^= x3; x3 |= x0; x0 =~ x0; \
- x3 ^= x2; x2 |= x0; x4 ^= x1; \
- x2 ^= x4; x4 &= x0; x0 ^= x1; \
- x1 ^= x3; x0 &= x2; x2 ^= x3; \
- x0 ^= x2; x2 ^= x4; x4 ^= x3;
-
-#define SI6(x0,x1,x2,x3,x4) \
- x0 ^= x2; \
- x4 = x0; x0 &= x3; x2 ^= x3; \
- x0 ^= x2; x3 ^= x1; x2 |= x4; \
- x2 ^= x3; x3 &= x0; x0 =~ x0; \
- x3 ^= x1; x1 &= x2; x4 ^= x0; \
- x3 ^= x4; x4 ^= x2; x0 ^= x1; \
- x2 ^= x0;
-
-#define SI7(x0,x1,x2,x3,x4) \
- x4 = x3; x3 &= x0; x0 ^= x2; \
- x2 |= x4; x4 ^= x1; x0 =~ x0; \
- x1 |= x3; x4 ^= x0; x0 &= x2; \
- x0 ^= x1; x1 &= x2; x3 ^= x2; \
- x4 ^= x3; x2 &= x3; x3 |= x0; \
- x1 ^= x4; x3 ^= x4; x4 &= x0; \
- x4 ^= x2;
-
-struct serpent_ctx {
- u32 expkey[SERPENT_EXPKEY_WORDS];
-};
-
-
-static int serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
- unsigned int keylen)
-{
- struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
- u32 *k = ctx->expkey;
- u8 *k8 = (u8 *)k;
- u32 r0,r1,r2,r3,r4;
- int i;
-
- /* Copy key, add padding */
-
- for (i = 0; i < keylen; ++i)
- k8[i] = key[i];
- if (i < SERPENT_MAX_KEY_SIZE)
- k8[i++] = 1;
- while (i < SERPENT_MAX_KEY_SIZE)
- k8[i++] = 0;
-
- /* Expand key using polynomial */
-
- r0 = le32_to_cpu(k[3]);
- r1 = le32_to_cpu(k[4]);
- r2 = le32_to_cpu(k[5]);
- r3 = le32_to_cpu(k[6]);
- r4 = le32_to_cpu(k[7]);
-
- keyiter(le32_to_cpu(k[0]),r0,r4,r2,0,0);
- keyiter(le32_to_cpu(k[1]),r1,r0,r3,1,1);
- keyiter(le32_to_cpu(k[2]),r2,r1,r4,2,2);
- keyiter(le32_to_cpu(k[3]),r3,r2,r0,3,3);
- keyiter(le32_to_cpu(k[4]),r4,r3,r1,4,4);
- keyiter(le32_to_cpu(k[5]),r0,r4,r2,5,5);
- keyiter(le32_to_cpu(k[6]),r1,r0,r3,6,6);
- keyiter(le32_to_cpu(k[7]),r2,r1,r4,7,7);
-
- keyiter(k[ 0],r3,r2,r0, 8, 8); keyiter(k[ 1],r4,r3,r1, 9, 9);
- keyiter(k[ 2],r0,r4,r2, 10, 10); keyiter(k[ 3],r1,r0,r3, 11, 11);
- keyiter(k[ 4],r2,r1,r4, 12, 12); keyiter(k[ 5],r3,r2,r0, 13, 13);
- keyiter(k[ 6],r4,r3,r1, 14, 14); keyiter(k[ 7],r0,r4,r2, 15, 15);
- keyiter(k[ 8],r1,r0,r3, 16, 16); keyiter(k[ 9],r2,r1,r4, 17, 17);
- keyiter(k[ 10],r3,r2,r0, 18, 18); keyiter(k[ 11],r4,r3,r1, 19, 19);
- keyiter(k[ 12],r0,r4,r2, 20, 20); keyiter(k[ 13],r1,r0,r3, 21, 21);
- keyiter(k[ 14],r2,r1,r4, 22, 22); keyiter(k[ 15],r3,r2,r0, 23, 23);
- keyiter(k[ 16],r4,r3,r1, 24, 24); keyiter(k[ 17],r0,r4,r2, 25, 25);
- keyiter(k[ 18],r1,r0,r3, 26, 26); keyiter(k[ 19],r2,r1,r4, 27, 27);
- keyiter(k[ 20],r3,r2,r0, 28, 28); keyiter(k[ 21],r4,r3,r1, 29, 29);
- keyiter(k[ 22],r0,r4,r2, 30, 30); keyiter(k[ 23],r1,r0,r3, 31, 31);
-
- k += 50;
-
- keyiter(k[-26],r2,r1,r4, 32,-18); keyiter(k[-25],r3,r2,r0, 33,-17);
- keyiter(k[-24],r4,r3,r1, 34,-16); keyiter(k[-23],r0,r4,r2, 35,-15);
- keyiter(k[-22],r1,r0,r3, 36,-14); keyiter(k[-21],r2,r1,r4, 37,-13);
- keyiter(k[-20],r3,r2,r0, 38,-12); keyiter(k[-19],r4,r3,r1, 39,-11);
- keyiter(k[-18],r0,r4,r2, 40,-10); keyiter(k[-17],r1,r0,r3, 41, -9);
- keyiter(k[-16],r2,r1,r4, 42, -8); keyiter(k[-15],r3,r2,r0, 43, -7);
- keyiter(k[-14],r4,r3,r1, 44, -6); keyiter(k[-13],r0,r4,r2, 45, -5);
- keyiter(k[-12],r1,r0,r3, 46, -4); keyiter(k[-11],r2,r1,r4, 47, -3);
- keyiter(k[-10],r3,r2,r0, 48, -2); keyiter(k[ -9],r4,r3,r1, 49, -1);
- keyiter(k[ -8],r0,r4,r2, 50, 0); keyiter(k[ -7],r1,r0,r3, 51, 1);
- keyiter(k[ -6],r2,r1,r4, 52, 2); keyiter(k[ -5],r3,r2,r0, 53, 3);
- keyiter(k[ -4],r4,r3,r1, 54, 4); keyiter(k[ -3],r0,r4,r2, 55, 5);
- keyiter(k[ -2],r1,r0,r3, 56, 6); keyiter(k[ -1],r2,r1,r4, 57, 7);
- keyiter(k[ 0],r3,r2,r0, 58, 8); keyiter(k[ 1],r4,r3,r1, 59, 9);
- keyiter(k[ 2],r0,r4,r2, 60, 10); keyiter(k[ 3],r1,r0,r3, 61, 11);
- keyiter(k[ 4],r2,r1,r4, 62, 12); keyiter(k[ 5],r3,r2,r0, 63, 13);
- keyiter(k[ 6],r4,r3,r1, 64, 14); keyiter(k[ 7],r0,r4,r2, 65, 15);
- keyiter(k[ 8],r1,r0,r3, 66, 16); keyiter(k[ 9],r2,r1,r4, 67, 17);
- keyiter(k[ 10],r3,r2,r0, 68, 18); keyiter(k[ 11],r4,r3,r1, 69, 19);
- keyiter(k[ 12],r0,r4,r2, 70, 20); keyiter(k[ 13],r1,r0,r3, 71, 21);
- keyiter(k[ 14],r2,r1,r4, 72, 22); keyiter(k[ 15],r3,r2,r0, 73, 23);
- keyiter(k[ 16],r4,r3,r1, 74, 24); keyiter(k[ 17],r0,r4,r2, 75, 25);
- keyiter(k[ 18],r1,r0,r3, 76, 26); keyiter(k[ 19],r2,r1,r4, 77, 27);
- keyiter(k[ 20],r3,r2,r0, 78, 28); keyiter(k[ 21],r4,r3,r1, 79, 29);
- keyiter(k[ 22],r0,r4,r2, 80, 30); keyiter(k[ 23],r1,r0,r3, 81, 31);
-
- k += 50;
-
- keyiter(k[-26],r2,r1,r4, 82,-18); keyiter(k[-25],r3,r2,r0, 83,-17);
- keyiter(k[-24],r4,r3,r1, 84,-16); keyiter(k[-23],r0,r4,r2, 85,-15);
- keyiter(k[-22],r1,r0,r3, 86,-14); keyiter(k[-21],r2,r1,r4, 87,-13);
- keyiter(k[-20],r3,r2,r0, 88,-12); keyiter(k[-19],r4,r3,r1, 89,-11);
- keyiter(k[-18],r0,r4,r2, 90,-10); keyiter(k[-17],r1,r0,r3, 91, -9);
- keyiter(k[-16],r2,r1,r4, 92, -8); keyiter(k[-15],r3,r2,r0, 93, -7);
- keyiter(k[-14],r4,r3,r1, 94, -6); keyiter(k[-13],r0,r4,r2, 95, -5);
- keyiter(k[-12],r1,r0,r3, 96, -4); keyiter(k[-11],r2,r1,r4, 97, -3);
- keyiter(k[-10],r3,r2,r0, 98, -2); keyiter(k[ -9],r4,r3,r1, 99, -1);
- keyiter(k[ -8],r0,r4,r2,100, 0); keyiter(k[ -7],r1,r0,r3,101, 1);
- keyiter(k[ -6],r2,r1,r4,102, 2); keyiter(k[ -5],r3,r2,r0,103, 3);
- keyiter(k[ -4],r4,r3,r1,104, 4); keyiter(k[ -3],r0,r4,r2,105, 5);
- keyiter(k[ -2],r1,r0,r3,106, 6); keyiter(k[ -1],r2,r1,r4,107, 7);
- keyiter(k[ 0],r3,r2,r0,108, 8); keyiter(k[ 1],r4,r3,r1,109, 9);
- keyiter(k[ 2],r0,r4,r2,110, 10); keyiter(k[ 3],r1,r0,r3,111, 11);
- keyiter(k[ 4],r2,r1,r4,112, 12); keyiter(k[ 5],r3,r2,r0,113, 13);
- keyiter(k[ 6],r4,r3,r1,114, 14); keyiter(k[ 7],r0,r4,r2,115, 15);
- keyiter(k[ 8],r1,r0,r3,116, 16); keyiter(k[ 9],r2,r1,r4,117, 17);
- keyiter(k[ 10],r3,r2,r0,118, 18); keyiter(k[ 11],r4,r3,r1,119, 19);
- keyiter(k[ 12],r0,r4,r2,120, 20); keyiter(k[ 13],r1,r0,r3,121, 21);
- keyiter(k[ 14],r2,r1,r4,122, 22); keyiter(k[ 15],r3,r2,r0,123, 23);
- keyiter(k[ 16],r4,r3,r1,124, 24); keyiter(k[ 17],r0,r4,r2,125, 25);
- keyiter(k[ 18],r1,r0,r3,126, 26); keyiter(k[ 19],r2,r1,r4,127, 27);
- keyiter(k[ 20],r3,r2,r0,128, 28); keyiter(k[ 21],r4,r3,r1,129, 29);
- keyiter(k[ 22],r0,r4,r2,130, 30); keyiter(k[ 23],r1,r0,r3,131, 31);
-
- /* Apply S-boxes */
-
- S3(r3,r4,r0,r1,r2); storekeys(r1,r2,r4,r3, 28); loadkeys(r1,r2,r4,r3, 24);
- S4(r1,r2,r4,r3,r0); storekeys(r2,r4,r3,r0, 24); loadkeys(r2,r4,r3,r0, 20);
- S5(r2,r4,r3,r0,r1); storekeys(r1,r2,r4,r0, 20); loadkeys(r1,r2,r4,r0, 16);
- S6(r1,r2,r4,r0,r3); storekeys(r4,r3,r2,r0, 16); loadkeys(r4,r3,r2,r0, 12);
- S7(r4,r3,r2,r0,r1); storekeys(r1,r2,r0,r4, 12); loadkeys(r1,r2,r0,r4, 8);
- S0(r1,r2,r0,r4,r3); storekeys(r0,r2,r4,r1, 8); loadkeys(r0,r2,r4,r1, 4);
- S1(r0,r2,r4,r1,r3); storekeys(r3,r4,r1,r0, 4); loadkeys(r3,r4,r1,r0, 0);
- S2(r3,r4,r1,r0,r2); storekeys(r2,r4,r3,r0, 0); loadkeys(r2,r4,r3,r0, -4);
- S3(r2,r4,r3,r0,r1); storekeys(r0,r1,r4,r2, -4); loadkeys(r0,r1,r4,r2, -8);
- S4(r0,r1,r4,r2,r3); storekeys(r1,r4,r2,r3, -8); loadkeys(r1,r4,r2,r3,-12);
- S5(r1,r4,r2,r3,r0); storekeys(r0,r1,r4,r3,-12); loadkeys(r0,r1,r4,r3,-16);
- S6(r0,r1,r4,r3,r2); storekeys(r4,r2,r1,r3,-16); loadkeys(r4,r2,r1,r3,-20);
- S7(r4,r2,r1,r3,r0); storekeys(r0,r1,r3,r4,-20); loadkeys(r0,r1,r3,r4,-24);
- S0(r0,r1,r3,r4,r2); storekeys(r3,r1,r4,r0,-24); loadkeys(r3,r1,r4,r0,-28);
- k -= 50;
- S1(r3,r1,r4,r0,r2); storekeys(r2,r4,r0,r3, 22); loadkeys(r2,r4,r0,r3, 18);
- S2(r2,r4,r0,r3,r1); storekeys(r1,r4,r2,r3, 18); loadkeys(r1,r4,r2,r3, 14);
- S3(r1,r4,r2,r3,r0); storekeys(r3,r0,r4,r1, 14); loadkeys(r3,r0,r4,r1, 10);
- S4(r3,r0,r4,r1,r2); storekeys(r0,r4,r1,r2, 10); loadkeys(r0,r4,r1,r2, 6);
- S5(r0,r4,r1,r2,r3); storekeys(r3,r0,r4,r2, 6); loadkeys(r3,r0,r4,r2, 2);
- S6(r3,r0,r4,r2,r1); storekeys(r4,r1,r0,r2, 2); loadkeys(r4,r1,r0,r2, -2);
- S7(r4,r1,r0,r2,r3); storekeys(r3,r0,r2,r4, -2); loadkeys(r3,r0,r2,r4, -6);
- S0(r3,r0,r2,r4,r1); storekeys(r2,r0,r4,r3, -6); loadkeys(r2,r0,r4,r3,-10);
- S1(r2,r0,r4,r3,r1); storekeys(r1,r4,r3,r2,-10); loadkeys(r1,r4,r3,r2,-14);
- S2(r1,r4,r3,r2,r0); storekeys(r0,r4,r1,r2,-14); loadkeys(r0,r4,r1,r2,-18);
- S3(r0,r4,r1,r2,r3); storekeys(r2,r3,r4,r0,-18); loadkeys(r2,r3,r4,r0,-22);
- k -= 50;
- S4(r2,r3,r4,r0,r1); storekeys(r3,r4,r0,r1, 28); loadkeys(r3,r4,r0,r1, 24);
- S5(r3,r4,r0,r1,r2); storekeys(r2,r3,r4,r1, 24); loadkeys(r2,r3,r4,r1, 20);
- S6(r2,r3,r4,r1,r0); storekeys(r4,r0,r3,r1, 20); loadkeys(r4,r0,r3,r1, 16);
- S7(r4,r0,r3,r1,r2); storekeys(r2,r3,r1,r4, 16); loadkeys(r2,r3,r1,r4, 12);
- S0(r2,r3,r1,r4,r0); storekeys(r1,r3,r4,r2, 12); loadkeys(r1,r3,r4,r2, 8);
- S1(r1,r3,r4,r2,r0); storekeys(r0,r4,r2,r1, 8); loadkeys(r0,r4,r2,r1, 4);
- S2(r0,r4,r2,r1,r3); storekeys(r3,r4,r0,r1, 4); loadkeys(r3,r4,r0,r1, 0);
- S3(r3,r4,r0,r1,r2); storekeys(r1,r2,r4,r3, 0);
-
- return 0;
-}
-
-static void serpent_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
- const u32
- *k = ctx->expkey;
- const __le32 *s = (const __le32 *)src;
- __le32 *d = (__le32 *)dst;
- u32 r0, r1, r2, r3, r4;
-
-/*
- * Note: The conversions between u8* and u32* might cause trouble
- * on architectures with stricter alignment rules than x86
- */
-
- r0 = le32_to_cpu(s[0]);
- r1 = le32_to_cpu(s[1]);
- r2 = le32_to_cpu(s[2]);
- r3 = le32_to_cpu(s[3]);
-
- K(r0,r1,r2,r3,0);
- S0(r0,r1,r2,r3,r4); LK(r2,r1,r3,r0,r4,1);
- S1(r2,r1,r3,r0,r4); LK(r4,r3,r0,r2,r1,2);
- S2(r4,r3,r0,r2,r1); LK(r1,r3,r4,r2,r0,3);
- S3(r1,r3,r4,r2,r0); LK(r2,r0,r3,r1,r4,4);
- S4(r2,r0,r3,r1,r4); LK(r0,r3,r1,r4,r2,5);
- S5(r0,r3,r1,r4,r2); LK(r2,r0,r3,r4,r1,6);
- S6(r2,r0,r3,r4,r1); LK(r3,r1,r0,r4,r2,7);
- S7(r3,r1,r0,r4,r2); LK(r2,r0,r4,r3,r1,8);
- S0(r2,r0,r4,r3,r1); LK(r4,r0,r3,r2,r1,9);
- S1(r4,r0,r3,r2,r1); LK(r1,r3,r2,r4,r0,10);
- S2(r1,r3,r2,r4,r0); LK(r0,r3,r1,r4,r2,11);
- S3(r0,r3,r1,r4,r2); LK(r4,r2,r3,r0,r1,12);
- S4(r4,r2,r3,r0,r1); LK(r2,r3,r0,r1,r4,13);
- S5(r2,r3,r0,r1,r4); LK(r4,r2,r3,r1,r0,14);
- S6(r4,r2,r3,r1,r0); LK(r3,r0,r2,r1,r4,15);
- S7(r3,r0,r2,r1,r4); LK(r4,r2,r1,r3,r0,16);
- S0(r4,r2,r1,r3,r0); LK(r1,r2,r3,r4,r0,17);
- S1(r1,r2,r3,r4,r0); LK(r0,r3,r4,r1,r2,18);
- S2(r0,r3,r4,r1,r2); LK(r2,r3,r0,r1,r4,19);
- S3(r2,r3,r0,r1,r4); LK(r1,r4,r3,r2,r0,20);
- S4(r1,r4,r3,r2,r0); LK(r4,r3,r2,r0,r1,21);
- S5(r4,r3,r2,r0,r1); LK(r1,r4,r3,r0,r2,22);
- S6(r1,r4,r3,r0,r2); LK(r3,r2,r4,r0,r1,23);
- S7(r3,r2,r4,r0,r1); LK(r1,r4,r0,r3,r2,24);
- S0(r1,r4,r0,r3,r2); LK(r0,r4,r3,r1,r2,25);
- S1(r0,r4,r3,r1,r2); LK(r2,r3,r1,r0,r4,26);
- S2(r2,r3,r1,r0,r4); LK(r4,r3,r2,r0,r1,27);
- S3(r4,r3,r2,r0,r1); LK(r0,r1,r3,r4,r2,28);
- S4(r0,r1,r3,r4,r2); LK(r1,r3,r4,r2,r0,29);
- S5(r1,r3,r4,r2,r0); LK(r0,r1,r3,r2,r4,30);
- S6(r0,r1,r3,r2,r4); LK(r3,r4,r1,r2,r0,31);
- S7(r3,r4,r1,r2,r0); K(r0,r1,r2,r3,32);
-
- d[0] = cpu_to_le32(r0);
- d[1] = cpu_to_le32(r1);
- d[2] = cpu_to_le32(r2);
- d[3] = cpu_to_le32(r3);
-}
-
-static void serpent_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
- const u32
- *k = ((struct serpent_ctx *)ctx)->expkey;
- const __le32 *s = (const __le32 *)src;
- __le32 *d = (__le32 *)dst;
- u32 r0, r1, r2, r3, r4;
-
- r0 = le32_to_cpu(s[0]);
- r1 = le32_to_cpu(s[1]);
- r2 = le32_to_cpu(s[2]);
- r3 = le32_to_cpu(s[3]);
-
- K(r0,r1,r2,r3,32);
- SI7(r0,r1,r2,r3,r4); KL(r1,r3,r0,r4,r2,31);
- SI6(r1,r3,r0,r4,r2); KL(r0,r2,r4,r1,r3,30);
- SI5(r0,r2,r4,r1,r3); KL(r2,r3,r0,r4,r1,29);
- SI4(r2,r3,r0,r4,r1); KL(r2,r0,r1,r4,r3,28);
- SI3(r2,r0,r1,r4,r3); KL(r1,r2,r3,r4,r0,27);
- SI2(r1,r2,r3,r4,r0); KL(r2,r0,r4,r3,r1,26);
- SI1(r2,r0,r4,r3,r1); KL(r1,r0,r4,r3,r2,25);
- SI0(r1,r0,r4,r3,r2); KL(r4,r2,r0,r1,r3,24);
- SI7(r4,r2,r0,r1,r3); KL(r2,r1,r4,r3,r0,23);
- SI6(r2,r1,r4,r3,r0); KL(r4,r0,r3,r2,r1,22);
- SI5(r4,r0,r3,r2,r1); KL(r0,r1,r4,r3,r2,21);
- SI4(r0,r1,r4,r3,r2); KL(r0,r4,r2,r3,r1,20);
- SI3(r0,r4,r2,r3,r1); KL(r2,r0,r1,r3,r4,19);
- SI2(r2,r0,r1,r3,r4); KL(r0,r4,r3,r1,r2,18);
- SI1(r0,r4,r3,r1,r2); KL(r2,r4,r3,r1,r0,17);
- SI0(r2,r4,r3,r1,r0); KL(r3,r0,r4,r2,r1,16);
- SI7(r3,r0,r4,r2,r1); KL(r0,r2,r3,r1,r4,15);
- SI6(r0,r2,r3,r1,r4); KL(r3,r4,r1,r0,r2,14);
- SI5(r3,r4,r1,r0,r2); KL(r4,r2,r3,r1,r0,13);
- SI4(r4,r2,r3,r1,r0); KL(r4,r3,r0,r1,r2,12);
- SI3(r4,r3,r0,r1,r2); KL(r0,r4,r2,r1,r3,11);
- SI2(r0,r4,r2,r1,r3); KL(r4,r3,r1,r2,r0,10);
- SI1(r4,r3,r1,r2,r0); KL(r0,r3,r1,r2,r4,9);
- SI0(r0,r3,r1,r2,r4); KL(r1,r4,r3,r0,r2,8);
- SI7(r1,r4,r3,r0,r2); KL(r4,r0,r1,r2,r3,7);
- SI6(r4,r0,r1,r2,r3); KL(r1,r3,r2,r4,r0,6);
- SI5(r1,r3,r2,r4,r0); KL(r3,r0,r1,r2,r4,5);
- SI4(r3,r0,r1,r2,r4); KL(r3,r1,r4,r2,r0,4);
- SI3(r3,r1,r4,r2,r0); KL(r4,r3,r0,r2,r1,3);
- SI2(r4,r3,r0,r2,r1); KL(r3,r1,r2,r0,r4,2);
- SI1(r3,r1,r2,r0,r4); KL(r4,r1,r2,r0,r3,1);
- SI0(r4,r1,r2,r0,r3); K(r2,r3,r1,r4,0);
-
- d[0] = cpu_to_le32(r2);
- d[1] = cpu_to_le32(r3);
- d[2] = cpu_to_le32(r1);
- d[3] = cpu_to_le32(r4);
-}
-
-static struct crypto_alg serpent_alg = {
- .cra_name = "serpent",
- .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
- .cra_blocksize = SERPENT_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct serpent_ctx),
- .cra_alignmask = 3,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(serpent_alg.cra_list),
- .cra_u = { .cipher = {
- .cia_min_keysize = SERPENT_MIN_KEY_SIZE,
- .cia_max_keysize = SERPENT_MAX_KEY_SIZE,
- .cia_setkey = serpent_setkey,
- .cia_encrypt = serpent_encrypt,
- .cia_decrypt = serpent_decrypt } }
-};
-
-static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key,
- unsigned int keylen)
-{
- u8 rev_key[SERPENT_MAX_KEY_SIZE];
- int i;
-
- for (i = 0; i < keylen; ++i)
- rev_key[keylen - i - 1] = key[i];
-
- return serpent_setkey(tfm, rev_key, keylen);
-}
-
-static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- const u32 * const s = (const u32 * const)src;
- u32 * const d = (u32 * const)dst;
-
- u32 rs[4], rd[4];
-
- rs[0] = swab32(s[3]);
- rs[1] = swab32(s[2]);
- rs[2] = swab32(s[1]);
- rs[3] = swab32(s[0]);
-
- serpent_encrypt(tfm, (u8 *)rd, (u8 *)rs);
-
- d[0] = swab32(rd[3]);
- d[1] = swab32(rd[2]);
- d[2] = swab32(rd[1]);
- d[3] = swab32(rd[0]);
-}
-
-static void tnepres_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- const u32 * const s = (const u32 * const)src;
- u32 * const d = (u32 * const)dst;
-
- u32 rs[4], rd[4];
-
- rs[0] = swab32(s[3]);
- rs[1] = swab32(s[2]);
- rs[2] = swab32(s[1]);
- rs[3] = swab32(s[0]);
-
- serpent_decrypt(tfm, (u8 *)rd, (u8 *)rs);
-
- d[0] = swab32(rd[3]);
- d[1] = swab32(rd[2]);
- d[2] = swab32(rd[1]);
- d[3] = swab32(rd[0]);
-}
-
-static struct crypto_alg tnepres_alg = {
- .cra_name = "tnepres",
- .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
- .cra_blocksize = SERPENT_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct serpent_ctx),
- .cra_alignmask = 3,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(serpent_alg.cra_list),
- .cra_u = { .cipher = {
- .cia_min_keysize = SERPENT_MIN_KEY_SIZE,
- .cia_max_keysize = SERPENT_MAX_KEY_SIZE,
- .cia_setkey = tnepres_setkey,
- .cia_encrypt = tnepres_encrypt,
- .cia_decrypt = tnepres_decrypt } }
-};
-
-static int __init serpent_mod_init(void)
-{
- int ret = crypto_register_alg(&serpent_alg);
-
- if (ret)
- return ret;
-
- ret = crypto_register_alg(&tnepres_alg);
-
- if (ret)
- crypto_unregister_alg(&serpent_alg);
-
- return ret;
-}
-
-static void __exit serpent_mod_fini(void)
-{
- crypto_unregister_alg(&tnepres_alg);
- crypto_unregister_alg(&serpent_alg);
-}
-
-module_init(serpent_mod_init);
-module_exit(serpent_mod_fini);
-
-MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Serpent and tnepres (kerneli compatible serpent reversed) Cipher Algorithm");
-MODULE_AUTHOR("Dag Arne Osvik <osvik@ii.uib.no>");
-MODULE_ALIAS("tnepres");
diff --git a/crypto/serpent_generic.c b/crypto/serpent_generic.c
new file mode 100644
index 00000000000..7ddbd7e8885
--- /dev/null
+++ b/crypto/serpent_generic.c
@@ -0,0 +1,669 @@
+/*
+ * Cryptographic API.
+ *
+ * Serpent Cipher Algorithm.
+ *
+ * Copyright (C) 2002 Dag Arne Osvik <osvik@ii.uib.no>
+ * 2003 Herbert Valerio Riedel <hvr@gnu.org>
+ *
+ * Added tnepres support:
+ * Ruben Jesus Garcia Hernandez <ruben@ugr.es>, 18.10.2004
+ * Based on code by hvr
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ */
+
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/errno.h>
+#include <asm/byteorder.h>
+#include <linux/crypto.h>
+#include <linux/types.h>
+#include <crypto/serpent.h>
+
+/* Key is padded to the maximum of 256 bits before round key generation.
+ * Any key length <= 256 bits (32 bytes) is allowed by the algorithm.
+ */
+
+#define PHI 0x9e3779b9UL
+
+#define keyiter(a, b, c, d, i, j) \
+ ({ b ^= d; b ^= c; b ^= a; b ^= PHI ^ i; b = rol32(b, 11); k[j] = b; })
+
+#define loadkeys(x0, x1, x2, x3, i) \
+ ({ x0 = k[i]; x1 = k[i+1]; x2 = k[i+2]; x3 = k[i+3]; })
+
+#define storekeys(x0, x1, x2, x3, i) \
+ ({ k[i] = x0; k[i+1] = x1; k[i+2] = x2; k[i+3] = x3; })
+
+#define store_and_load_keys(x0, x1, x2, x3, s, l) \
+ ({ storekeys(x0, x1, x2, x3, s); loadkeys(x0, x1, x2, x3, l); })
+
+#define K(x0, x1, x2, x3, i) ({ \
+ x3 ^= k[4*(i)+3]; x2 ^= k[4*(i)+2]; \
+ x1 ^= k[4*(i)+1]; x0 ^= k[4*(i)+0]; \
+ })
+
+#define LK(x0, x1, x2, x3, x4, i) ({ \
+ x0 = rol32(x0, 13);\
+ x2 = rol32(x2, 3); x1 ^= x0; x4 = x0 << 3; \
+ x3 ^= x2; x1 ^= x2; \
+ x1 = rol32(x1, 1); x3 ^= x4; \
+ x3 = rol32(x3, 7); x4 = x1; \
+ x0 ^= x1; x4 <<= 7; x2 ^= x3; \
+ x0 ^= x3; x2 ^= x4; x3 ^= k[4*i+3]; \
+ x1 ^= k[4*i+1]; x0 = rol32(x0, 5); x2 = rol32(x2, 22);\
+ x0 ^= k[4*i+0]; x2 ^= k[4*i+2]; \
+ })
+
+#define KL(x0, x1, x2, x3, x4, i) ({ \
+ x0 ^= k[4*i+0]; x1 ^= k[4*i+1]; x2 ^= k[4*i+2]; \
+ x3 ^= k[4*i+3]; x0 = ror32(x0, 5); x2 = ror32(x2, 22);\
+ x4 = x1; x2 ^= x3; x0 ^= x3; \
+ x4 <<= 7; x0 ^= x1; x1 = ror32(x1, 1); \
+ x2 ^= x4; x3 = ror32(x3, 7); x4 = x0 << 3; \
+ x1 ^= x0; x3 ^= x4; x0 = ror32(x0, 13);\
+ x1 ^= x2; x3 ^= x2; x2 = ror32(x2, 3); \
+ })
+
+#define S0(x0, x1, x2, x3, x4) ({ \
+ x4 = x3; \
+ x3 |= x0; x0 ^= x4; x4 ^= x2; \
+ x4 = ~x4; x3 ^= x1; x1 &= x0; \
+ x1 ^= x4; x2 ^= x0; x0 ^= x3; \
+ x4 |= x0; x0 ^= x2; x2 &= x1; \
+ x3 ^= x2; x1 = ~x1; x2 ^= x4; \
+ x1 ^= x2; \
+ })
+
+#define S1(x0, x1, x2, x3, x4) ({ \
+ x4 = x1; \
+ x1 ^= x0; x0 ^= x3; x3 = ~x3; \
+ x4 &= x1; x0 |= x1; x3 ^= x2; \
+ x0 ^= x3; x1 ^= x3; x3 ^= x4; \
+ x1 |= x4; x4 ^= x2; x2 &= x0; \
+ x2 ^= x1; x1 |= x0; x0 = ~x0; \
+ x0 ^= x2; x4 ^= x1; \
+ })
+
+#define S2(x0, x1, x2, x3, x4) ({ \
+ x3 = ~x3; \
+ x1 ^= x0; x4 = x0; x0 &= x2; \
+ x0 ^= x3; x3 |= x4; x2 ^= x1; \
+ x3 ^= x1; x1 &= x0; x0 ^= x2; \
+ x2 &= x3; x3 |= x1; x0 = ~x0; \
+ x3 ^= x0; x4 ^= x0; x0 ^= x2; \
+ x1 |= x2; \
+ })
+
+#define S3(x0, x1, x2, x3, x4) ({ \
+ x4 = x1; \
+ x1 ^= x3; x3 |= x0; x4 &= x0; \
+ x0 ^= x2; x2 ^= x1; x1 &= x3; \
+ x2 ^= x3; x0 |= x4; x4 ^= x3; \
+ x1 ^= x0; x0 &= x3; x3 &= x4; \
+ x3 ^= x2; x4 |= x1; x2 &= x1; \
+ x4 ^= x3; x0 ^= x3; x3 ^= x2; \
+ })
+
+#define S4(x0, x1, x2, x3, x4) ({ \
+ x4 = x3; \
+ x3 &= x0; x0 ^= x4; \
+ x3 ^= x2; x2 |= x4; x0 ^= x1; \
+ x4 ^= x3; x2 |= x0; \
+ x2 ^= x1; x1 &= x0; \
+ x1 ^= x4; x4 &= x2; x2 ^= x3; \
+ x4 ^= x0; x3 |= x1; x1 = ~x1; \
+ x3 ^= x0; \
+ })
+
+#define S5(x0, x1, x2, x3, x4) ({ \
+ x4 = x1; x1 |= x0; \
+ x2 ^= x1; x3 = ~x3; x4 ^= x0; \
+ x0 ^= x2; x1 &= x4; x4 |= x3; \
+ x4 ^= x0; x0 &= x3; x1 ^= x3; \
+ x3 ^= x2; x0 ^= x1; x2 &= x4; \
+ x1 ^= x2; x2 &= x0; \
+ x3 ^= x2; \
+ })
+
+#define S6(x0, x1, x2, x3, x4) ({ \
+ x4 = x1; \
+ x3 ^= x0; x1 ^= x2; x2 ^= x0; \
+ x0 &= x3; x1 |= x3; x4 = ~x4; \
+ x0 ^= x1; x1 ^= x2; \
+ x3 ^= x4; x4 ^= x0; x2 &= x0; \
+ x4 ^= x1; x2 ^= x3; x3 &= x1; \
+ x3 ^= x0; x1 ^= x2; \
+ })
+
+#define S7(x0, x1, x2, x3, x4) ({ \
+ x1 = ~x1; \
+ x4 = x1; x0 = ~x0; x1 &= x2; \
+ x1 ^= x3; x3 |= x4; x4 ^= x2; \
+ x2 ^= x3; x3 ^= x0; x0 |= x1; \
+ x2 &= x0; x0 ^= x4; x4 ^= x3; \
+ x3 &= x0; x4 ^= x1; \
+ x2 ^= x4; x3 ^= x1; x4 |= x0; \
+ x4 ^= x1; \
+ })
+
+#define SI0(x0, x1, x2, x3, x4) ({ \
+ x4 = x3; x1 ^= x0; \
+ x3 |= x1; x4 ^= x1; x0 = ~x0; \
+ x2 ^= x3; x3 ^= x0; x0 &= x1; \
+ x0 ^= x2; x2 &= x3; x3 ^= x4; \
+ x2 ^= x3; x1 ^= x3; x3 &= x0; \
+ x1 ^= x0; x0 ^= x2; x4 ^= x3; \
+ })
+
+#define SI1(x0, x1, x2, x3, x4) ({ \
+ x1 ^= x3; x4 = x0; \
+ x0 ^= x2; x2 = ~x2; x4 |= x1; \
+ x4 ^= x3; x3 &= x1; x1 ^= x2; \
+ x2 &= x4; x4 ^= x1; x1 |= x3; \
+ x3 ^= x0; x2 ^= x0; x0 |= x4; \
+ x2 ^= x4; x1 ^= x0; \
+ x4 ^= x1; \
+ })
+
+#define SI2(x0, x1, x2, x3, x4) ({ \
+ x2 ^= x1; x4 = x3; x3 = ~x3; \
+ x3 |= x2; x2 ^= x4; x4 ^= x0; \
+ x3 ^= x1; x1 |= x2; x2 ^= x0; \
+ x1 ^= x4; x4 |= x3; x2 ^= x3; \
+ x4 ^= x2; x2 &= x1; \
+ x2 ^= x3; x3 ^= x4; x4 ^= x0; \
+ })
+
+#define SI3(x0, x1, x2, x3, x4) ({ \
+ x2 ^= x1; \
+ x4 = x1; x1 &= x2; \
+ x1 ^= x0; x0 |= x4; x4 ^= x3; \
+ x0 ^= x3; x3 |= x1; x1 ^= x2; \
+ x1 ^= x3; x0 ^= x2; x2 ^= x3; \
+ x3 &= x1; x1 ^= x0; x0 &= x2; \
+ x4 ^= x3; x3 ^= x0; x0 ^= x1; \
+ })
+
+#define SI4(x0, x1, x2, x3, x4) ({ \
+ x2 ^= x3; x4 = x0; x0 &= x1; \
+ x0 ^= x2; x2 |= x3; x4 = ~x4; \
+ x1 ^= x0; x0 ^= x2; x2 &= x4; \
+ x2 ^= x0; x0 |= x4; \
+ x0 ^= x3; x3 &= x2; \
+ x4 ^= x3; x3 ^= x1; x1 &= x0; \
+ x4 ^= x1; x0 ^= x3; \
+ })
+
+#define SI5(x0, x1, x2, x3, x4) ({ \
+ x4 = x1; x1 |= x2; \
+ x2 ^= x4; x1 ^= x3; x3 &= x4; \
+ x2 ^= x3; x3 |= x0; x0 = ~x0; \
+ x3 ^= x2; x2 |= x0; x4 ^= x1; \
+ x2 ^= x4; x4 &= x0; x0 ^= x1; \
+ x1 ^= x3; x0 &= x2; x2 ^= x3; \
+ x0 ^= x2; x2 ^= x4; x4 ^= x3; \
+ })
+
+#define SI6(x0, x1, x2, x3, x4) ({ \
+ x0 ^= x2; \
+ x4 = x0; x0 &= x3; x2 ^= x3; \
+ x0 ^= x2; x3 ^= x1; x2 |= x4; \
+ x2 ^= x3; x3 &= x0; x0 = ~x0; \
+ x3 ^= x1; x1 &= x2; x4 ^= x0; \
+ x3 ^= x4; x4 ^= x2; x0 ^= x1; \
+ x2 ^= x0; \
+ })
+
+#define SI7(x0, x1, x2, x3, x4) ({ \
+ x4 = x3; x3 &= x0; x0 ^= x2; \
+ x2 |= x4; x4 ^= x1; x0 = ~x0; \
+ x1 |= x3; x4 ^= x0; x0 &= x2; \
+ x0 ^= x1; x1 &= x2; x3 ^= x2; \
+ x4 ^= x3; x2 &= x3; x3 |= x0; \
+ x1 ^= x4; x3 ^= x4; x4 &= x0; \
+ x4 ^= x2; \
+ })
+
+int __serpent_setkey(struct serpent_ctx *ctx, const u8 *key,
+ unsigned int keylen)
+{
+ u32 *k = ctx->expkey;
+ u8 *k8 = (u8 *)k;
+ u32 r0, r1, r2, r3, r4;
+ int i;
+
+ /* Copy key, add padding */
+
+ for (i = 0; i < keylen; ++i)
+ k8[i] = key[i];
+ if (i < SERPENT_MAX_KEY_SIZE)
+ k8[i++] = 1;
+ while (i < SERPENT_MAX_KEY_SIZE)
+ k8[i++] = 0;
+
+ /* Expand key using polynomial */
+
+ r0 = le32_to_cpu(k[3]);
+ r1 = le32_to_cpu(k[4]);
+ r2 = le32_to_cpu(k[5]);
+ r3 = le32_to_cpu(k[6]);
+ r4 = le32_to_cpu(k[7]);
+
+ keyiter(le32_to_cpu(k[0]), r0, r4, r2, 0, 0);
+ keyiter(le32_to_cpu(k[1]), r1, r0, r3, 1, 1);
+ keyiter(le32_to_cpu(k[2]), r2, r1, r4, 2, 2);
+ keyiter(le32_to_cpu(k[3]), r3, r2, r0, 3, 3);
+ keyiter(le32_to_cpu(k[4]), r4, r3, r1, 4, 4);
+ keyiter(le32_to_cpu(k[5]), r0, r4, r2, 5, 5);
+ keyiter(le32_to_cpu(k[6]), r1, r0, r3, 6, 6);
+ keyiter(le32_to_cpu(k[7]), r2, r1, r4, 7, 7);
+
+ keyiter(k[0], r3, r2, r0, 8, 8);
+ keyiter(k[1], r4, r3, r1, 9, 9);
+ keyiter(k[2], r0, r4, r2, 10, 10);
+ keyiter(k[3], r1, r0, r3, 11, 11);
+ keyiter(k[4], r2, r1, r4, 12, 12);
+ keyiter(k[5], r3, r2, r0, 13, 13);
+ keyiter(k[6], r4, r3, r1, 14, 14);
+ keyiter(k[7], r0, r4, r2, 15, 15);
+ keyiter(k[8], r1, r0, r3, 16, 16);
+ keyiter(k[9], r2, r1, r4, 17, 17);
+ keyiter(k[10], r3, r2, r0, 18, 18);
+ keyiter(k[11], r4, r3, r1, 19, 19);
+ keyiter(k[12], r0, r4, r2, 20, 20);
+ keyiter(k[13], r1, r0, r3, 21, 21);
+ keyiter(k[14], r2, r1, r4, 22, 22);
+ keyiter(k[15], r3, r2, r0, 23, 23);
+ keyiter(k[16], r4, r3, r1, 24, 24);
+ keyiter(k[17], r0, r4, r2, 25, 25);
+ keyiter(k[18], r1, r0, r3, 26, 26);
+ keyiter(k[19], r2, r1, r4, 27, 27);
+ keyiter(k[20], r3, r2, r0, 28, 28);
+ keyiter(k[21], r4, r3, r1, 29, 29);
+ keyiter(k[22], r0, r4, r2, 30, 30);
+ keyiter(k[23], r1, r0, r3, 31, 31);
+
+ k += 50;
+
+ keyiter(k[-26], r2, r1, r4, 32, -18);
+ keyiter(k[-25], r3, r2, r0, 33, -17);
+ keyiter(k[-24], r4, r3, r1, 34, -16);
+ keyiter(k[-23], r0, r4, r2, 35, -15);
+ keyiter(k[-22], r1, r0, r3, 36, -14);
+ keyiter(k[-21], r2, r1, r4, 37, -13);
+ keyiter(k[-20], r3, r2, r0, 38, -12);
+ keyiter(k[-19], r4, r3, r1, 39, -11);
+ keyiter(k[-18], r0, r4, r2, 40, -10);
+ keyiter(k[-17], r1, r0, r3, 41, -9);
+ keyiter(k[-16], r2, r1, r4, 42, -8);
+ keyiter(k[-15], r3, r2, r0, 43, -7);
+ keyiter(k[-14], r4, r3, r1, 44, -6);
+ keyiter(k[-13], r0, r4, r2, 45, -5);
+ keyiter(k[-12], r1, r0, r3, 46, -4);
+ keyiter(k[-11], r2, r1, r4, 47, -3);
+ keyiter(k[-10], r3, r2, r0, 48, -2);
+ keyiter(k[-9], r4, r3, r1, 49, -1);
+ keyiter(k[-8], r0, r4, r2, 50, 0);
+ keyiter(k[-7], r1, r0, r3, 51, 1);
+ keyiter(k[-6], r2, r1, r4, 52, 2);
+ keyiter(k[-5], r3, r2, r0, 53, 3);
+ keyiter(k[-4], r4, r3, r1, 54, 4);
+ keyiter(k[-3], r0, r4, r2, 55, 5);
+ keyiter(k[-2], r1, r0, r3, 56, 6);
+ keyiter(k[-1], r2, r1, r4, 57, 7);
+ keyiter(k[0], r3, r2, r0, 58, 8);
+ keyiter(k[1], r4, r3, r1, 59, 9);
+ keyiter(k[2], r0, r4, r2, 60, 10);
+ keyiter(k[3], r1, r0, r3, 61, 11);
+ keyiter(k[4], r2, r1, r4, 62, 12);
+ keyiter(k[5], r3, r2, r0, 63, 13);
+ keyiter(k[6], r4, r3, r1, 64, 14);
+ keyiter(k[7], r0, r4, r2, 65, 15);
+ keyiter(k[8], r1, r0, r3, 66, 16);
+ keyiter(k[9], r2, r1, r4, 67, 17);
+ keyiter(k[10], r3, r2, r0, 68, 18);
+ keyiter(k[11], r4, r3, r1, 69, 19);
+ keyiter(k[12], r0, r4, r2, 70, 20);
+ keyiter(k[13], r1, r0, r3, 71, 21);
+ keyiter(k[14], r2, r1, r4, 72, 22);
+ keyiter(k[15], r3, r2, r0, 73, 23);
+ keyiter(k[16], r4, r3, r1, 74, 24);
+ keyiter(k[17], r0, r4, r2, 75, 25);
+ keyiter(k[18], r1, r0, r3, 76, 26);
+ keyiter(k[19], r2, r1, r4, 77, 27);
+ keyiter(k[20], r3, r2, r0, 78, 28);
+ keyiter(k[21], r4, r3, r1, 79, 29);
+ keyiter(k[22], r0, r4, r2, 80, 30);
+ keyiter(k[23], r1, r0, r3, 81, 31);
+
+ k += 50;
+
+ keyiter(k[-26], r2, r1, r4, 82, -18);
+ keyiter(k[-25], r3, r2, r0, 83, -17);
+ keyiter(k[-24], r4, r3, r1, 84, -16);
+ keyiter(k[-23], r0, r4, r2, 85, -15);
+ keyiter(k[-22], r1, r0, r3, 86, -14);
+ keyiter(k[-21], r2, r1, r4, 87, -13);
+ keyiter(k[-20], r3, r2, r0, 88, -12);
+ keyiter(k[-19], r4, r3, r1, 89, -11);
+ keyiter(k[-18], r0, r4, r2, 90, -10);
+ keyiter(k[-17], r1, r0, r3, 91, -9);
+ keyiter(k[-16], r2, r1, r4, 92, -8);
+ keyiter(k[-15], r3, r2, r0, 93, -7);
+ keyiter(k[-14], r4, r3, r1, 94, -6);
+ keyiter(k[-13], r0, r4, r2, 95, -5);
+ keyiter(k[-12], r1, r0, r3, 96, -4);
+ keyiter(k[-11], r2, r1, r4, 97, -3);
+ keyiter(k[-10], r3, r2, r0, 98, -2);
+ keyiter(k[-9], r4, r3, r1, 99, -1);
+ keyiter(k[-8], r0, r4, r2, 100, 0);
+ keyiter(k[-7], r1, r0, r3, 101, 1);
+ keyiter(k[-6], r2, r1, r4, 102, 2);
+ keyiter(k[-5], r3, r2, r0, 103, 3);
+ keyiter(k[-4], r4, r3, r1, 104, 4);
+ keyiter(k[-3], r0, r4, r2, 105, 5);
+ keyiter(k[-2], r1, r0, r3, 106, 6);
+ keyiter(k[-1], r2, r1, r4, 107, 7);
+ keyiter(k[0], r3, r2, r0, 108, 8);
+ keyiter(k[1], r4, r3, r1, 109, 9);
+ keyiter(k[2], r0, r4, r2, 110, 10);
+ keyiter(k[3], r1, r0, r3, 111, 11);
+ keyiter(k[4], r2, r1, r4, 112, 12);
+ keyiter(k[5], r3, r2, r0, 113, 13);
+ keyiter(k[6], r4, r3, r1, 114, 14);
+ keyiter(k[7], r0, r4, r2, 115, 15);
+ keyiter(k[8], r1, r0, r3, 116, 16);
+ keyiter(k[9], r2, r1, r4, 117, 17);
+ keyiter(k[10], r3, r2, r0, 118, 18);
+ keyiter(k[11], r4, r3, r1, 119, 19);
+ keyiter(k[12], r0, r4, r2, 120, 20);
+ keyiter(k[13], r1, r0, r3, 121, 21);
+ keyiter(k[14], r2, r1, r4, 122, 22);
+ keyiter(k[15], r3, r2, r0, 123, 23);
+ keyiter(k[16], r4, r3, r1, 124, 24);
+ keyiter(k[17], r0, r4, r2, 125, 25);
+ keyiter(k[18], r1, r0, r3, 126, 26);
+ keyiter(k[19], r2, r1, r4, 127, 27);
+ keyiter(k[20], r3, r2, r0, 128, 28);
+ keyiter(k[21], r4, r3, r1, 129, 29);
+ keyiter(k[22], r0, r4, r2, 130, 30);
+ keyiter(k[23], r1, r0, r3, 131, 31);
+
+ /* Apply S-boxes */
+
+ S3(r3, r4, r0, r1, r2); store_and_load_keys(r1, r2, r4, r3, 28, 24);
+ S4(r1, r2, r4, r3, r0); store_and_load_keys(r2, r4, r3, r0, 24, 20);
+ S5(r2, r4, r3, r0, r1); store_and_load_keys(r1, r2, r4, r0, 20, 16);
+ S6(r1, r2, r4, r0, r3); store_and_load_keys(r4, r3, r2, r0, 16, 12);
+ S7(r4, r3, r2, r0, r1); store_and_load_keys(r1, r2, r0, r4, 12, 8);
+ S0(r1, r2, r0, r4, r3); store_and_load_keys(r0, r2, r4, r1, 8, 4);
+ S1(r0, r2, r4, r1, r3); store_and_load_keys(r3, r4, r1, r0, 4, 0);
+ S2(r3, r4, r1, r0, r2); store_and_load_keys(r2, r4, r3, r0, 0, -4);
+ S3(r2, r4, r3, r0, r1); store_and_load_keys(r0, r1, r4, r2, -4, -8);
+ S4(r0, r1, r4, r2, r3); store_and_load_keys(r1, r4, r2, r3, -8, -12);
+ S5(r1, r4, r2, r3, r0); store_and_load_keys(r0, r1, r4, r3, -12, -16);
+ S6(r0, r1, r4, r3, r2); store_and_load_keys(r4, r2, r1, r3, -16, -20);
+ S7(r4, r2, r1, r3, r0); store_and_load_keys(r0, r1, r3, r4, -20, -24);
+ S0(r0, r1, r3, r4, r2); store_and_load_keys(r3, r1, r4, r0, -24, -28);
+ k -= 50;
+ S1(r3, r1, r4, r0, r2); store_and_load_keys(r2, r4, r0, r3, 22, 18);
+ S2(r2, r4, r0, r3, r1); store_and_load_keys(r1, r4, r2, r3, 18, 14);
+ S3(r1, r4, r2, r3, r0); store_and_load_keys(r3, r0, r4, r1, 14, 10);
+ S4(r3, r0, r4, r1, r2); store_and_load_keys(r0, r4, r1, r2, 10, 6);
+ S5(r0, r4, r1, r2, r3); store_and_load_keys(r3, r0, r4, r2, 6, 2);
+ S6(r3, r0, r4, r2, r1); store_and_load_keys(r4, r1, r0, r2, 2, -2);
+ S7(r4, r1, r0, r2, r3); store_and_load_keys(r3, r0, r2, r4, -2, -6);
+ S0(r3, r0, r2, r4, r1); store_and_load_keys(r2, r0, r4, r3, -6, -10);
+ S1(r2, r0, r4, r3, r1); store_and_load_keys(r1, r4, r3, r2, -10, -14);
+ S2(r1, r4, r3, r2, r0); store_and_load_keys(r0, r4, r1, r2, -14, -18);
+ S3(r0, r4, r1, r2, r3); store_and_load_keys(r2, r3, r4, r0, -18, -22);
+ k -= 50;
+ S4(r2, r3, r4, r0, r1); store_and_load_keys(r3, r4, r0, r1, 28, 24);
+ S5(r3, r4, r0, r1, r2); store_and_load_keys(r2, r3, r4, r1, 24, 20);
+ S6(r2, r3, r4, r1, r0); store_and_load_keys(r4, r0, r3, r1, 20, 16);
+ S7(r4, r0, r3, r1, r2); store_and_load_keys(r2, r3, r1, r4, 16, 12);
+ S0(r2, r3, r1, r4, r0); store_and_load_keys(r1, r3, r4, r2, 12, 8);
+ S1(r1, r3, r4, r2, r0); store_and_load_keys(r0, r4, r2, r1, 8, 4);
+ S2(r0, r4, r2, r1, r3); store_and_load_keys(r3, r4, r0, r1, 4, 0);
+ S3(r3, r4, r0, r1, r2); storekeys(r1, r2, r4, r3, 0);
+
+ return 0;
+}
+EXPORT_SYMBOL_GPL(__serpent_setkey);
+
+int serpent_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
+{
+ return __serpent_setkey(crypto_tfm_ctx(tfm), key, keylen);
+}
+EXPORT_SYMBOL_GPL(serpent_setkey);
+
+void __serpent_encrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
+{
+ const u32 *k = ctx->expkey;
+ const __le32 *s = (const __le32 *)src;
+ __le32 *d = (__le32 *)dst;
+ u32 r0, r1, r2, r3, r4;
+
+/*
+ * Note: The conversions between u8* and u32* might cause trouble
+ * on architectures with stricter alignment rules than x86
+ */
+
+ r0 = le32_to_cpu(s[0]);
+ r1 = le32_to_cpu(s[1]);
+ r2 = le32_to_cpu(s[2]);
+ r3 = le32_to_cpu(s[3]);
+
+ K(r0, r1, r2, r3, 0);
+ S0(r0, r1, r2, r3, r4); LK(r2, r1, r3, r0, r4, 1);
+ S1(r2, r1, r3, r0, r4); LK(r4, r3, r0, r2, r1, 2);
+ S2(r4, r3, r0, r2, r1); LK(r1, r3, r4, r2, r0, 3);
+ S3(r1, r3, r4, r2, r0); LK(r2, r0, r3, r1, r4, 4);
+ S4(r2, r0, r3, r1, r4); LK(r0, r3, r1, r4, r2, 5);
+ S5(r0, r3, r1, r4, r2); LK(r2, r0, r3, r4, r1, 6);
+ S6(r2, r0, r3, r4, r1); LK(r3, r1, r0, r4, r2, 7);
+ S7(r3, r1, r0, r4, r2); LK(r2, r0, r4, r3, r1, 8);
+ S0(r2, r0, r4, r3, r1); LK(r4, r0, r3, r2, r1, 9);
+ S1(r4, r0, r3, r2, r1); LK(r1, r3, r2, r4, r0, 10);
+ S2(r1, r3, r2, r4, r0); LK(r0, r3, r1, r4, r2, 11);
+ S3(r0, r3, r1, r4, r2); LK(r4, r2, r3, r0, r1, 12);
+ S4(r4, r2, r3, r0, r1); LK(r2, r3, r0, r1, r4, 13);
+ S5(r2, r3, r0, r1, r4); LK(r4, r2, r3, r1, r0, 14);
+ S6(r4, r2, r3, r1, r0); LK(r3, r0, r2, r1, r4, 15);
+ S7(r3, r0, r2, r1, r4); LK(r4, r2, r1, r3, r0, 16);
+ S0(r4, r2, r1, r3, r0); LK(r1, r2, r3, r4, r0, 17);
+ S1(r1, r2, r3, r4, r0); LK(r0, r3, r4, r1, r2, 18);
+ S2(r0, r3, r4, r1, r2); LK(r2, r3, r0, r1, r4, 19);
+ S3(r2, r3, r0, r1, r4); LK(r1, r4, r3, r2, r0, 20);
+ S4(r1, r4, r3, r2, r0); LK(r4, r3, r2, r0, r1, 21);
+ S5(r4, r3, r2, r0, r1); LK(r1, r4, r3, r0, r2, 22);
+ S6(r1, r4, r3, r0, r2); LK(r3, r2, r4, r0, r1, 23);
+ S7(r3, r2, r4, r0, r1); LK(r1, r4, r0, r3, r2, 24);
+ S0(r1, r4, r0, r3, r2); LK(r0, r4, r3, r1, r2, 25);
+ S1(r0, r4, r3, r1, r2); LK(r2, r3, r1, r0, r4, 26);
+ S2(r2, r3, r1, r0, r4); LK(r4, r3, r2, r0, r1, 27);
+ S3(r4, r3, r2, r0, r1); LK(r0, r1, r3, r4, r2, 28);
+ S4(r0, r1, r3, r4, r2); LK(r1, r3, r4, r2, r0, 29);
+ S5(r1, r3, r4, r2, r0); LK(r0, r1, r3, r2, r4, 30);
+ S6(r0, r1, r3, r2, r4); LK(r3, r4, r1, r2, r0, 31);
+ S7(r3, r4, r1, r2, r0); K(r0, r1, r2, r3, 32);
+
+ d[0] = cpu_to_le32(r0);
+ d[1] = cpu_to_le32(r1);
+ d[2] = cpu_to_le32(r2);
+ d[3] = cpu_to_le32(r3);
+}
+EXPORT_SYMBOL_GPL(__serpent_encrypt);
+
+static void serpent_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+ struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ __serpent_encrypt(ctx, dst, src);
+}
+
+void __serpent_decrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
+{
+ const u32 *k = ctx->expkey;
+ const __le32 *s = (const __le32 *)src;
+ __le32 *d = (__le32 *)dst;
+ u32 r0, r1, r2, r3, r4;
+
+ r0 = le32_to_cpu(s[0]);
+ r1 = le32_to_cpu(s[1]);
+ r2 = le32_to_cpu(s[2]);
+ r3 = le32_to_cpu(s[3]);
+
+ K(r0, r1, r2, r3, 32);
+ SI7(r0, r1, r2, r3, r4); KL(r1, r3, r0, r4, r2, 31);
+ SI6(r1, r3, r0, r4, r2); KL(r0, r2, r4, r1, r3, 30);
+ SI5(r0, r2, r4, r1, r3); KL(r2, r3, r0, r4, r1, 29);
+ SI4(r2, r3, r0, r4, r1); KL(r2, r0, r1, r4, r3, 28);
+ SI3(r2, r0, r1, r4, r3); KL(r1, r2, r3, r4, r0, 27);
+ SI2(r1, r2, r3, r4, r0); KL(r2, r0, r4, r3, r1, 26);
+ SI1(r2, r0, r4, r3, r1); KL(r1, r0, r4, r3, r2, 25);
+ SI0(r1, r0, r4, r3, r2); KL(r4, r2, r0, r1, r3, 24);
+ SI7(r4, r2, r0, r1, r3); KL(r2, r1, r4, r3, r0, 23);
+ SI6(r2, r1, r4, r3, r0); KL(r4, r0, r3, r2, r1, 22);
+ SI5(r4, r0, r3, r2, r1); KL(r0, r1, r4, r3, r2, 21);
+ SI4(r0, r1, r4, r3, r2); KL(r0, r4, r2, r3, r1, 20);
+ SI3(r0, r4, r2, r3, r1); KL(r2, r0, r1, r3, r4, 19);
+ SI2(r2, r0, r1, r3, r4); KL(r0, r4, r3, r1, r2, 18);
+ SI1(r0, r4, r3, r1, r2); KL(r2, r4, r3, r1, r0, 17);
+ SI0(r2, r4, r3, r1, r0); KL(r3, r0, r4, r2, r1, 16);
+ SI7(r3, r0, r4, r2, r1); KL(r0, r2, r3, r1, r4, 15);
+ SI6(r0, r2, r3, r1, r4); KL(r3, r4, r1, r0, r2, 14);
+ SI5(r3, r4, r1, r0, r2); KL(r4, r2, r3, r1, r0, 13);
+ SI4(r4, r2, r3, r1, r0); KL(r4, r3, r0, r1, r2, 12);
+ SI3(r4, r3, r0, r1, r2); KL(r0, r4, r2, r1, r3, 11);
+ SI2(r0, r4, r2, r1, r3); KL(r4, r3, r1, r2, r0, 10);
+ SI1(r4, r3, r1, r2, r0); KL(r0, r3, r1, r2, r4, 9);
+ SI0(r0, r3, r1, r2, r4); KL(r1, r4, r3, r0, r2, 8);
+ SI7(r1, r4, r3, r0, r2); KL(r4, r0, r1, r2, r3, 7);
+ SI6(r4, r0, r1, r2, r3); KL(r1, r3, r2, r4, r0, 6);
+ SI5(r1, r3, r2, r4, r0); KL(r3, r0, r1, r2, r4, 5);
+ SI4(r3, r0, r1, r2, r4); KL(r3, r1, r4, r2, r0, 4);
+ SI3(r3, r1, r4, r2, r0); KL(r4, r3, r0, r2, r1, 3);
+ SI2(r4, r3, r0, r2, r1); KL(r3, r1, r2, r0, r4, 2);
+ SI1(r3, r1, r2, r0, r4); KL(r4, r1, r2, r0, r3, 1);
+ SI0(r4, r1, r2, r0, r3); K(r2, r3, r1, r4, 0);
+
+ d[0] = cpu_to_le32(r2);
+ d[1] = cpu_to_le32(r3);
+ d[2] = cpu_to_le32(r1);
+ d[3] = cpu_to_le32(r4);
+}
+EXPORT_SYMBOL_GPL(__serpent_decrypt);
+
+static void serpent_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+ struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ __serpent_decrypt(ctx, dst, src);
+}
+
+static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ u8 rev_key[SERPENT_MAX_KEY_SIZE];
+ int i;
+
+ for (i = 0; i < keylen; ++i)
+ rev_key[keylen - i - 1] = key[i];
+
+ return serpent_setkey(tfm, rev_key, keylen);
+}
+
+static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+ const u32 * const s = (const u32 * const)src;
+ u32 * const d = (u32 * const)dst;
+
+ u32 rs[4], rd[4];
+
+ rs[0] = swab32(s[3]);
+ rs[1] = swab32(s[2]);
+ rs[2] = swab32(s[1]);
+ rs[3] = swab32(s[0]);
+
+ serpent_encrypt(tfm, (u8 *)rd, (u8 *)rs);
+
+ d[0] = swab32(rd[3]);
+ d[1] = swab32(rd[2]);
+ d[2] = swab32(rd[1]);
+ d[3] = swab32(rd[0]);
+}
+
+static void tnepres_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+ const u32 * const s = (const u32 * const)src;
+ u32 * const d = (u32 * const)dst;
+
+ u32 rs[4], rd[4];
+
+ rs[0] = swab32(s[3]);
+ rs[1] = swab32(s[2]);
+ rs[2] = swab32(s[1]);
+ rs[3] = swab32(s[0]);
+
+ serpent_decrypt(tfm, (u8 *)rd, (u8 *)rs);
+
+ d[0] = swab32(rd[3]);
+ d[1] = swab32(rd[2]);
+ d[2] = swab32(rd[1]);
+ d[3] = swab32(rd[0]);
+}
+
+static struct crypto_alg srp_algs[2] = { {
+ .cra_name = "serpent",
+ .cra_driver_name = "serpent-generic",
+ .cra_priority = 100,
+ .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
+ .cra_blocksize = SERPENT_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct serpent_ctx),
+ .cra_alignmask = 3,
+ .cra_module = THIS_MODULE,
+ .cra_u = { .cipher = {
+ .cia_min_keysize = SERPENT_MIN_KEY_SIZE,
+ .cia_max_keysize = SERPENT_MAX_KEY_SIZE,
+ .cia_setkey = serpent_setkey,
+ .cia_encrypt = serpent_encrypt,
+ .cia_decrypt = serpent_decrypt } }
+}, {
+ .cra_name = "tnepres",
+ .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
+ .cra_blocksize = SERPENT_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct serpent_ctx),
+ .cra_alignmask = 3,
+ .cra_module = THIS_MODULE,
+ .cra_u = { .cipher = {
+ .cia_min_keysize = SERPENT_MIN_KEY_SIZE,
+ .cia_max_keysize = SERPENT_MAX_KEY_SIZE,
+ .cia_setkey = tnepres_setkey,
+ .cia_encrypt = tnepres_encrypt,
+ .cia_decrypt = tnepres_decrypt } }
+} };
+
+static int __init serpent_mod_init(void)
+{
+ return crypto_register_algs(srp_algs, ARRAY_SIZE(srp_algs));
+}
+
+static void __exit serpent_mod_fini(void)
+{
+ crypto_unregister_algs(srp_algs, ARRAY_SIZE(srp_algs));
+}
+
+module_init(serpent_mod_init);
+module_exit(serpent_mod_fini);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Serpent and tnepres (kerneli compatible serpent reversed) Cipher Algorithm");
+MODULE_AUTHOR("Dag Arne Osvik <osvik@ii.uib.no>");
+MODULE_ALIAS("tnepres");
+MODULE_ALIAS("serpent");
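The "tnepres" entry registered above is the kerneli-compatible variant of serpent: tnepres_setkey() feeds a byte-reversed copy of the key to serpent_setkey(), and tnepres_encrypt()/tnepres_decrypt() reverse the order of the four 32-bit words of each block and byte-swap them on the way into and out of the plain serpent routines. The following minimal user-space sketch shows only that reshuffling; it uses no kernel interfaces and all names in it are illustrative.

/*
 * Illustrative user-space sketch (not kernel code) of the data
 * reshuffling performed by the tnepres wrappers: byte-reverse the key,
 * and reverse + byte-swap the four 32-bit words of each 16-byte block.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t swab32(uint32_t x)
{
	return (x >> 24) | ((x >> 8) & 0x0000ff00u) |
	       ((x << 8) & 0x00ff0000u) | (x << 24);
}

/* Byte-reverse a key, as tnepres_setkey() does before serpent_setkey(). */
static void reverse_key(uint8_t *dst, const uint8_t *src, size_t len)
{
	for (size_t i = 0; i < len; i++)
		dst[len - 1 - i] = src[i];
}

/* Reverse word order and endianness of one block, as the block wrappers do. */
static void swap_block(uint32_t out[4], const uint32_t in[4])
{
	for (int i = 0; i < 4; i++)
		out[i] = swab32(in[3 - i]);
}

int main(void)
{
	uint8_t key[16] = {  0,  1,  2,  3,  4,  5,  6,  7,
			     8,  9, 10, 11, 12, 13, 14, 15 };
	uint8_t rev[16];
	uint32_t block[4] = { 0x00010203, 0x04050607,
			      0x08090a0b, 0x0c0d0e0f };
	uint32_t swapped[4];

	reverse_key(rev, key, sizeof(key));
	swap_block(swapped, block);

	for (int i = 0; i < 16; i++)
		printf("%02x", (unsigned)rev[i]);
	printf("\n%08x %08x %08x %08x\n",
	       swapped[0], swapped[1], swapped[2], swapped[3]);
	return 0;
}

Only the data layout transformation is demonstrated here; the actual cipher rounds remain the serpent_encrypt()/serpent_decrypt() calls above.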
diff --git a/crypto/sha1_generic.c b/crypto/sha1_generic.c
index 9efef20454c..42794803c48 100644
--- a/crypto/sha1_generic.c
+++ b/crypto/sha1_generic.c
@@ -25,53 +25,44 @@
#include <crypto/sha.h>
#include <asm/byteorder.h>
-struct sha1_ctx {
- u64 count;
- u32 state[5];
- u8 buffer[64];
-};
-
static int sha1_init(struct shash_desc *desc)
{
- struct sha1_ctx *sctx = shash_desc_ctx(desc);
+ struct sha1_state *sctx = shash_desc_ctx(desc);
- static const struct sha1_ctx initstate = {
- 0,
- { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
- { 0, }
+ *sctx = (struct sha1_state){
+ .state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
};
- *sctx = initstate;
-
return 0;
}
-static int sha1_update(struct shash_desc *desc, const u8 *data,
+int crypto_sha1_update(struct shash_desc *desc, const u8 *data,
unsigned int len)
{
- struct sha1_ctx *sctx = shash_desc_ctx(desc);
+ struct sha1_state *sctx = shash_desc_ctx(desc);
unsigned int partial, done;
const u8 *src;
- partial = sctx->count & 0x3f;
+ partial = sctx->count % SHA1_BLOCK_SIZE;
sctx->count += len;
done = 0;
src = data;
- if ((partial + len) > 63) {
+ if ((partial + len) >= SHA1_BLOCK_SIZE) {
u32 temp[SHA_WORKSPACE_WORDS];
if (partial) {
done = -partial;
- memcpy(sctx->buffer + partial, data, done + 64);
+ memcpy(sctx->buffer + partial, data,
+ done + SHA1_BLOCK_SIZE);
src = sctx->buffer;
}
do {
sha_transform(sctx->state, src, temp);
- done += 64;
+ done += SHA1_BLOCK_SIZE;
src = data + done;
- } while (done + 63 < len);
+ } while (done + SHA1_BLOCK_SIZE <= len);
memset(temp, 0, sizeof(temp));
partial = 0;
@@ -80,12 +71,13 @@ static int sha1_update(struct shash_desc *desc, const u8 *data,
return 0;
}
+EXPORT_SYMBOL(crypto_sha1_update);
/* Add padding and return the message digest. */
static int sha1_final(struct shash_desc *desc, u8 *out)
{
- struct sha1_ctx *sctx = shash_desc_ctx(desc);
+ struct sha1_state *sctx = shash_desc_ctx(desc);
__be32 *dst = (__be32 *)out;
u32 i, index, padlen;
__be64 bits;
@@ -96,10 +88,10 @@ static int sha1_final(struct shash_desc *desc, u8 *out)
/* Pad out to 56 mod 64 */
index = sctx->count & 0x3f;
padlen = (index < 56) ? (56 - index) : ((64+56) - index);
- sha1_update(desc, padding, padlen);
+ crypto_sha1_update(desc, padding, padlen);
/* Append length */
- sha1_update(desc, (const u8 *)&bits, sizeof(bits));
+ crypto_sha1_update(desc, (const u8 *)&bits, sizeof(bits));
/* Store state in digest */
for (i = 0; i < 5; i++)
@@ -111,12 +103,31 @@ static int sha1_final(struct shash_desc *desc, u8 *out)
return 0;
}
+static int sha1_export(struct shash_desc *desc, void *out)
+{
+ struct sha1_state *sctx = shash_desc_ctx(desc);
+
+ memcpy(out, sctx, sizeof(*sctx));
+ return 0;
+}
+
+static int sha1_import(struct shash_desc *desc, const void *in)
+{
+ struct sha1_state *sctx = shash_desc_ctx(desc);
+
+ memcpy(sctx, in, sizeof(*sctx));
+ return 0;
+}
+
static struct shash_alg alg = {
.digestsize = SHA1_DIGEST_SIZE,
.init = sha1_init,
- .update = sha1_update,
+ .update = crypto_sha1_update,
.final = sha1_final,
- .descsize = sizeof(struct sha1_ctx),
+ .export = sha1_export,
+ .import = sha1_import,
+ .descsize = sizeof(struct sha1_state),
+ .statesize = sizeof(struct sha1_state),
.base = {
.cra_name = "sha1",
.cra_driver_name= "sha1-generic",
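The converted crypto_sha1_update() above follows the common block-buffering pattern: bytes accumulate in sctx->buffer until a full SHA1_BLOCK_SIZE chunk is available, full blocks are then hashed straight out of the caller's buffer, and any tail is stashed for the next call. Below is a stand-alone sketch of the same bookkeeping with a toy compression function; BLOCK, struct toy_state and toy_transform() are stand-ins rather than kernel interfaces, and the arithmetic is restructured slightly for readability.

/*
 * Stand-alone sketch of the partial-block buffering used by
 * crypto_sha1_update() above, with a toy compression function.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BLOCK 64

struct toy_state {
	uint64_t count;			/* total bytes hashed so far */
	uint32_t state;			/* toy digest state */
	uint8_t buffer[BLOCK];		/* trailing partial block */
};

/* Toy compression function: consumes exactly one BLOCK-sized chunk. */
static void toy_transform(uint32_t *state, const uint8_t *src)
{
	for (int i = 0; i < BLOCK; i++)
		*state = (*state * 31) ^ src[i];
}

static void toy_update(struct toy_state *ctx, const uint8_t *data, size_t len)
{
	size_t partial = ctx->count % BLOCK;
	size_t done = 0;
	const uint8_t *src = data;

	ctx->count += len;

	if (partial + len >= BLOCK) {
		if (partial) {
			/* Top up and flush the buffered partial block first. */
			memcpy(ctx->buffer + partial, data, BLOCK - partial);
			toy_transform(&ctx->state, ctx->buffer);
			done = BLOCK - partial;
			src = data + done;
			partial = 0;
		}
		/* Hash all remaining full blocks straight from the input. */
		while (done + BLOCK <= len) {
			toy_transform(&ctx->state, src);
			done += BLOCK;
			src = data + done;
		}
	}
	/* Stash whatever is left for the next update (or final). */
	memcpy(ctx->buffer + partial, src, len - done);
}

int main(void)
{
	struct toy_state ctx = { 0 };
	uint8_t msg[150];

	memset(msg, 0xab, sizeof(msg));
	toy_update(&ctx, msg, 10);	/* buffers a partial block */
	toy_update(&ctx, msg, 140);	/* tops it up, then full blocks */
	printf("%08x after %llu bytes\n", ctx.state,
	       (unsigned long long)ctx.count);
	return 0;
}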
diff --git a/crypto/sha256_generic.c b/crypto/sha256_generic.c
index caa3542e6ce..54336677952 100644
--- a/crypto/sha256_generic.c
+++ b/crypto/sha256_generic.c
@@ -2,7 +2,7 @@
* Cryptographic API.
*
* SHA-256, as specified in
- * http://csrc.nist.gov/cryptval/shs/sha256-384-512.pdf
+ * http://csrc.nist.gov/groups/STM/cavp/documents/shs/sha256-384-512.pdf
*
* SHA-256 code by Jean-Luc Cooke <jlcooke@certainkey.com>.
*
@@ -25,12 +25,6 @@
#include <crypto/sha.h>
#include <asm/byteorder.h>
-struct sha256_ctx {
- u32 count[2];
- u32 state[8];
- u8 buf[128];
-};
-
static inline u32 Ch(u32 x, u32 y, u32 z)
{
return z ^ (x & (y ^ z));
@@ -222,7 +216,7 @@ static void sha256_transform(u32 *state, const u8 *input)
static int sha224_init(struct shash_desc *desc)
{
- struct sha256_ctx *sctx = shash_desc_ctx(desc);
+ struct sha256_state *sctx = shash_desc_ctx(desc);
sctx->state[0] = SHA224_H0;
sctx->state[1] = SHA224_H1;
sctx->state[2] = SHA224_H2;
@@ -231,15 +225,14 @@ static int sha224_init(struct shash_desc *desc)
sctx->state[5] = SHA224_H5;
sctx->state[6] = SHA224_H6;
sctx->state[7] = SHA224_H7;
- sctx->count[0] = 0;
- sctx->count[1] = 0;
+ sctx->count = 0;
return 0;
}
static int sha256_init(struct shash_desc *desc)
{
- struct sha256_ctx *sctx = shash_desc_ctx(desc);
+ struct sha256_state *sctx = shash_desc_ctx(desc);
sctx->state[0] = SHA256_H0;
sctx->state[1] = SHA256_H1;
sctx->state[2] = SHA256_H2;
@@ -248,66 +241,63 @@ static int sha256_init(struct shash_desc *desc)
sctx->state[5] = SHA256_H5;
sctx->state[6] = SHA256_H6;
sctx->state[7] = SHA256_H7;
- sctx->count[0] = sctx->count[1] = 0;
+ sctx->count = 0;
return 0;
}
-static int sha256_update(struct shash_desc *desc, const u8 *data,
+int crypto_sha256_update(struct shash_desc *desc, const u8 *data,
unsigned int len)
{
- struct sha256_ctx *sctx = shash_desc_ctx(desc);
- unsigned int i, index, part_len;
-
- /* Compute number of bytes mod 128 */
- index = (unsigned int)((sctx->count[0] >> 3) & 0x3f);
-
- /* Update number of bits */
- if ((sctx->count[0] += (len << 3)) < (len << 3)) {
- sctx->count[1]++;
- sctx->count[1] += (len >> 29);
- }
-
- part_len = 64 - index;
-
- /* Transform as many times as possible. */
- if (len >= part_len) {
- memcpy(&sctx->buf[index], data, part_len);
- sha256_transform(sctx->state, sctx->buf);
-
- for (i = part_len; i + 63 < len; i += 64)
- sha256_transform(sctx->state, &data[i]);
- index = 0;
- } else {
- i = 0;
+ struct sha256_state *sctx = shash_desc_ctx(desc);
+ unsigned int partial, done;
+ const u8 *src;
+
+ partial = sctx->count & 0x3f;
+ sctx->count += len;
+ done = 0;
+ src = data;
+
+ if ((partial + len) > 63) {
+ if (partial) {
+ done = -partial;
+ memcpy(sctx->buf + partial, data, done + 64);
+ src = sctx->buf;
+ }
+
+ do {
+ sha256_transform(sctx->state, src);
+ done += 64;
+ src = data + done;
+ } while (done + 63 < len);
+
+ partial = 0;
}
-
- /* Buffer remaining input */
- memcpy(&sctx->buf[index], &data[i], len-i);
+ memcpy(sctx->buf + partial, src, len - done);
return 0;
}
+EXPORT_SYMBOL(crypto_sha256_update);
static int sha256_final(struct shash_desc *desc, u8 *out)
{
- struct sha256_ctx *sctx = shash_desc_ctx(desc);
+ struct sha256_state *sctx = shash_desc_ctx(desc);
__be32 *dst = (__be32 *)out;
- __be32 bits[2];
+ __be64 bits;
unsigned int index, pad_len;
int i;
static const u8 padding[64] = { 0x80, };
/* Save number of bits */
- bits[1] = cpu_to_be32(sctx->count[0]);
- bits[0] = cpu_to_be32(sctx->count[1]);
+ bits = cpu_to_be64(sctx->count << 3);
/* Pad out to 56 mod 64. */
- index = (sctx->count[0] >> 3) & 0x3f;
+ index = sctx->count & 0x3f;
pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
- sha256_update(desc, padding, pad_len);
+ crypto_sha256_update(desc, padding, pad_len);
/* Append length (before padding) */
- sha256_update(desc, (const u8 *)bits, sizeof(bits));
+ crypto_sha256_update(desc, (const u8 *)&bits, sizeof(bits));
/* Store state in digest */
for (i = 0; i < 8; i++)
@@ -331,12 +321,31 @@ static int sha224_final(struct shash_desc *desc, u8 *hash)
return 0;
}
-static struct shash_alg sha256 = {
+static int sha256_export(struct shash_desc *desc, void *out)
+{
+ struct sha256_state *sctx = shash_desc_ctx(desc);
+
+ memcpy(out, sctx, sizeof(*sctx));
+ return 0;
+}
+
+static int sha256_import(struct shash_desc *desc, const void *in)
+{
+ struct sha256_state *sctx = shash_desc_ctx(desc);
+
+ memcpy(sctx, in, sizeof(*sctx));
+ return 0;
+}
+
+static struct shash_alg sha256_algs[2] = { {
.digestsize = SHA256_DIGEST_SIZE,
.init = sha256_init,
- .update = sha256_update,
+ .update = crypto_sha256_update,
.final = sha256_final,
- .descsize = sizeof(struct sha256_ctx),
+ .export = sha256_export,
+ .import = sha256_import,
+ .descsize = sizeof(struct sha256_state),
+ .statesize = sizeof(struct sha256_state),
.base = {
.cra_name = "sha256",
.cra_driver_name= "sha256-generic",
@@ -344,14 +353,12 @@ static struct shash_alg sha256 = {
.cra_blocksize = SHA256_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
-};
-
-static struct shash_alg sha224 = {
+}, {
.digestsize = SHA224_DIGEST_SIZE,
.init = sha224_init,
- .update = sha256_update,
+ .update = crypto_sha256_update,
.final = sha224_final,
- .descsize = sizeof(struct sha256_ctx),
+ .descsize = sizeof(struct sha256_state),
.base = {
.cra_name = "sha224",
.cra_driver_name= "sha224-generic",
@@ -359,29 +366,16 @@ static struct shash_alg sha224 = {
.cra_blocksize = SHA224_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
-};
+} };
static int __init sha256_generic_mod_init(void)
{
- int ret = 0;
-
- ret = crypto_register_shash(&sha224);
-
- if (ret < 0)
- return ret;
-
- ret = crypto_register_shash(&sha256);
-
- if (ret < 0)
- crypto_unregister_shash(&sha224);
-
- return ret;
+ return crypto_register_shashes(sha256_algs, ARRAY_SIZE(sha256_algs));
}
static void __exit sha256_generic_mod_fini(void)
{
- crypto_unregister_shash(&sha224);
- crypto_unregister_shash(&sha256);
+ crypto_unregister_shashes(sha256_algs, ARRAY_SIZE(sha256_algs));
}
module_init(sha256_generic_mod_init);
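Besides gaining export/import handlers, sha256_final() above now derives both the padding length and the big-endian length field from a single 64-bit byte counter (bits = count << 3) instead of a pair of 32-bit bit counters. A small user-space sketch of just that arithmetic, with illustrative helper names:

/*
 * Sketch of the SHA-256 finalization arithmetic used above: pad to
 * 56 mod 64, then append the message length in bits as a big-endian
 * 64-bit value derived from the byte counter.
 */
#include <stdint.h>
#include <stdio.h>

/* Encode a 64-bit value big-endian, as cpu_to_be64() does in the kernel. */
static void put_be64(uint8_t out[8], uint64_t v)
{
	for (int i = 0; i < 8; i++)
		out[i] = (uint8_t)(v >> (56 - 8 * i));
}

/* Number of 0x80/0x00 padding bytes needed to reach 56 mod 64. */
static unsigned int pad_len(uint64_t count)
{
	unsigned int index = count & 0x3f;

	return (index < 56) ? (56 - index) : ((64 + 56) - index);
}

int main(void)
{
	uint64_t count = 3;		/* e.g. the 3-byte message "abc" */
	uint8_t bits[8];

	put_be64(bits, count << 3);	/* 24 bits */
	printf("pad %u bytes, length field ends in 0x%02x\n",
	       pad_len(count), bits[7]);
	return 0;
}

For the 3-byte example this yields 53 padding bytes, so message, padding and the 8-byte length together fill exactly one 64-byte block.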
diff --git a/crypto/sha512_generic.c b/crypto/sha512_generic.c
index 3bea38d1224..6ed124f3ea0 100644
--- a/crypto/sha512_generic.c
+++ b/crypto/sha512_generic.c
@@ -21,14 +21,6 @@
#include <linux/percpu.h>
#include <asm/byteorder.h>
-struct sha512_ctx {
- u64 state[8];
- u32 count[4];
- u8 buf[128];
-};
-
-static DEFINE_PER_CPU(u64[80], msg_schedule);
-
static inline u64 Ch(u64 x, u64 y, u64 z)
{
return z ^ (x & (y ^ z));
@@ -39,11 +31,6 @@ static inline u64 Maj(u64 x, u64 y, u64 z)
return (x & y) | (z & (x | y));
}
-static inline u64 RORu64(u64 x, u64 y)
-{
- return (x >> y) | (x << (64 - y));
-}
-
static const u64 sha512_K[80] = {
0x428a2f98d728ae22ULL, 0x7137449123ef65cdULL, 0xb5c0fbcfec4d3b2fULL,
0xe9b5dba58189dbbcULL, 0x3956c25bf348b538ULL, 0x59f111f1b605d019ULL,
@@ -74,10 +61,10 @@ static const u64 sha512_K[80] = {
0x5fcb6fab3ad6faecULL, 0x6c44198c4a475817ULL,
};
-#define e0(x) (RORu64(x,28) ^ RORu64(x,34) ^ RORu64(x,39))
-#define e1(x) (RORu64(x,14) ^ RORu64(x,18) ^ RORu64(x,41))
-#define s0(x) (RORu64(x, 1) ^ RORu64(x, 8) ^ (x >> 7))
-#define s1(x) (RORu64(x,19) ^ RORu64(x,61) ^ (x >> 6))
+#define e0(x) (ror64(x,28) ^ ror64(x,34) ^ ror64(x,39))
+#define e1(x) (ror64(x,14) ^ ror64(x,18) ^ ror64(x,41))
+#define s0(x) (ror64(x, 1) ^ ror64(x, 8) ^ (x >> 7))
+#define s1(x) (ror64(x,19) ^ ror64(x,61) ^ (x >> 6))
static inline void LOAD_OP(int I, u64 *W, const u8 *input)
{
@@ -86,7 +73,7 @@ static inline void LOAD_OP(int I, u64 *W, const u8 *input)
static inline void BLEND_OP(int I, u64 *W)
{
- W[I] = s1(W[I-2]) + W[I-7] + s0(W[I-15]) + W[I-16];
+ W[I & 15] += s1(W[(I-2) & 15]) + W[(I-7) & 15] + s0(W[(I-15) & 15]);
}
static void
@@ -95,15 +82,7 @@ sha512_transform(u64 *state, const u8 *input)
u64 a, b, c, d, e, f, g, h, t1, t2;
int i;
- u64 *W = get_cpu_var(msg_schedule);
-
- /* load the input */
- for (i = 0; i < 16; i++)
- LOAD_OP(i, W, input);
-
- for (i = 16; i < 80; i++) {
- BLEND_OP(i, W);
- }
+ u64 W[16];
/* load the state into our registers */
a=state[0]; b=state[1]; c=state[2]; d=state[3];
@@ -111,21 +90,35 @@ sha512_transform(u64 *state, const u8 *input)
/* now iterate */
for (i=0; i<80; i+=8) {
- t1 = h + e1(e) + Ch(e,f,g) + sha512_K[i ] + W[i ];
+ if (!(i & 8)) {
+ int j;
+
+ if (i < 16) {
+ /* load the input */
+ for (j = 0; j < 16; j++)
+ LOAD_OP(i + j, W, input);
+ } else {
+ for (j = 0; j < 16; j++) {
+ BLEND_OP(i + j, W);
+ }
+ }
+ }
+
+ t1 = h + e1(e) + Ch(e,f,g) + sha512_K[i ] + W[(i & 15)];
t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + sha512_K[i+1] + W[i+1];
+ t1 = g + e1(d) + Ch(d,e,f) + sha512_K[i+1] + W[(i & 15) + 1];
t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + sha512_K[i+2] + W[i+2];
+ t1 = f + e1(c) + Ch(c,d,e) + sha512_K[i+2] + W[(i & 15) + 2];
t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + sha512_K[i+3] + W[i+3];
+ t1 = e + e1(b) + Ch(b,c,d) + sha512_K[i+3] + W[(i & 15) + 3];
t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + sha512_K[i+4] + W[i+4];
+ t1 = d + e1(a) + Ch(a,b,c) + sha512_K[i+4] + W[(i & 15) + 4];
t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + sha512_K[i+5] + W[i+5];
+ t1 = c + e1(h) + Ch(h,a,b) + sha512_K[i+5] + W[(i & 15) + 5];
t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + sha512_K[i+6] + W[i+6];
+ t1 = b + e1(g) + Ch(g,h,a) + sha512_K[i+6] + W[(i & 15) + 6];
t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + sha512_K[i+7] + W[i+7];
+ t1 = a + e1(f) + Ch(f,g,h) + sha512_K[i+7] + W[(i & 15) + 7];
t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
}
@@ -134,14 +127,12 @@ sha512_transform(u64 *state, const u8 *input)
/* erase our data */
a = b = c = d = e = f = g = h = t1 = t2 = 0;
- memset(W, 0, sizeof(__get_cpu_var(msg_schedule)));
- put_cpu_var(msg_schedule);
}
static int
sha512_init(struct shash_desc *desc)
{
- struct sha512_ctx *sctx = shash_desc_ctx(desc);
+ struct sha512_state *sctx = shash_desc_ctx(desc);
sctx->state[0] = SHA512_H0;
sctx->state[1] = SHA512_H1;
sctx->state[2] = SHA512_H2;
@@ -150,7 +141,7 @@ sha512_init(struct shash_desc *desc)
sctx->state[5] = SHA512_H5;
sctx->state[6] = SHA512_H6;
sctx->state[7] = SHA512_H7;
- sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0;
+ sctx->count[0] = sctx->count[1] = 0;
return 0;
}
@@ -158,7 +149,7 @@ sha512_init(struct shash_desc *desc)
static int
sha384_init(struct shash_desc *desc)
{
- struct sha512_ctx *sctx = shash_desc_ctx(desc);
+ struct sha512_state *sctx = shash_desc_ctx(desc);
sctx->state[0] = SHA384_H0;
sctx->state[1] = SHA384_H1;
sctx->state[2] = SHA384_H2;
@@ -167,28 +158,24 @@ sha384_init(struct shash_desc *desc)
sctx->state[5] = SHA384_H5;
sctx->state[6] = SHA384_H6;
sctx->state[7] = SHA384_H7;
- sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0;
+ sctx->count[0] = sctx->count[1] = 0;
return 0;
}
-static int
-sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len)
+int crypto_sha512_update(struct shash_desc *desc, const u8 *data,
+ unsigned int len)
{
- struct sha512_ctx *sctx = shash_desc_ctx(desc);
+ struct sha512_state *sctx = shash_desc_ctx(desc);
unsigned int i, index, part_len;
/* Compute number of bytes mod 128 */
- index = (unsigned int)((sctx->count[0] >> 3) & 0x7F);
-
- /* Update number of bits */
- if ((sctx->count[0] += (len << 3)) < (len << 3)) {
- if ((sctx->count[1] += 1) < 1)
- if ((sctx->count[2] += 1) < 1)
- sctx->count[3]++;
- sctx->count[1] += (len >> 29);
- }
+ index = sctx->count[0] & 0x7f;
+
+ /* Update number of bytes */
+ if ((sctx->count[0] += len) < len)
+ sctx->count[1]++;
part_len = 128 - index;
@@ -210,37 +197,36 @@ sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len)
return 0;
}
+EXPORT_SYMBOL(crypto_sha512_update);
static int
sha512_final(struct shash_desc *desc, u8 *hash)
{
- struct sha512_ctx *sctx = shash_desc_ctx(desc);
+ struct sha512_state *sctx = shash_desc_ctx(desc);
static u8 padding[128] = { 0x80, };
__be64 *dst = (__be64 *)hash;
- __be32 bits[4];
+ __be64 bits[2];
unsigned int index, pad_len;
int i;
/* Save number of bits */
- bits[3] = cpu_to_be32(sctx->count[0]);
- bits[2] = cpu_to_be32(sctx->count[1]);
- bits[1] = cpu_to_be32(sctx->count[2]);
- bits[0] = cpu_to_be32(sctx->count[3]);
+ bits[1] = cpu_to_be64(sctx->count[0] << 3);
+ bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61);
/* Pad out to 112 mod 128. */
- index = (sctx->count[0] >> 3) & 0x7f;
+ index = sctx->count[0] & 0x7f;
pad_len = (index < 112) ? (112 - index) : ((128+112) - index);
- sha512_update(desc, padding, pad_len);
+ crypto_sha512_update(desc, padding, pad_len);
/* Append length (before padding) */
- sha512_update(desc, (const u8 *)bits, sizeof(bits));
+ crypto_sha512_update(desc, (const u8 *)bits, sizeof(bits));
/* Store state in digest */
for (i = 0; i < 8; i++)
dst[i] = cpu_to_be64(sctx->state[i]);
/* Zeroize sensitive information. */
- memset(sctx, 0, sizeof(struct sha512_ctx));
+ memset(sctx, 0, sizeof(struct sha512_state));
return 0;
}
@@ -257,50 +243,42 @@ static int sha384_final(struct shash_desc *desc, u8 *hash)
return 0;
}
-static struct shash_alg sha512 = {
+static struct shash_alg sha512_algs[2] = { {
.digestsize = SHA512_DIGEST_SIZE,
.init = sha512_init,
- .update = sha512_update,
+ .update = crypto_sha512_update,
.final = sha512_final,
- .descsize = sizeof(struct sha512_ctx),
+ .descsize = sizeof(struct sha512_state),
.base = {
.cra_name = "sha512",
+ .cra_driver_name = "sha512-generic",
.cra_flags = CRYPTO_ALG_TYPE_SHASH,
.cra_blocksize = SHA512_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
-};
-
-static struct shash_alg sha384 = {
+}, {
.digestsize = SHA384_DIGEST_SIZE,
.init = sha384_init,
- .update = sha512_update,
+ .update = crypto_sha512_update,
.final = sha384_final,
- .descsize = sizeof(struct sha512_ctx),
+ .descsize = sizeof(struct sha512_state),
.base = {
.cra_name = "sha384",
+ .cra_driver_name = "sha384-generic",
.cra_flags = CRYPTO_ALG_TYPE_SHASH,
.cra_blocksize = SHA384_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
-};
+} };
static int __init sha512_generic_mod_init(void)
{
- int ret = 0;
-
- if ((ret = crypto_register_shash(&sha384)) < 0)
- goto out;
- if ((ret = crypto_register_shash(&sha512)) < 0)
- crypto_unregister_shash(&sha384);
-out:
- return ret;
+ return crypto_register_shashes(sha512_algs, ARRAY_SIZE(sha512_algs));
}
static void __exit sha512_generic_mod_fini(void)
{
- crypto_unregister_shash(&sha384);
- crypto_unregister_shash(&sha512);
+ crypto_unregister_shashes(sha512_algs, ARRAY_SIZE(sha512_algs));
}
module_init(sha512_generic_mod_init);
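The reworked sha512_transform() above drops the 80-entry per-CPU message schedule and keeps only a 16-word window on the stack: each schedule word W_i overwrites the slot of W_(i-16), so W[i & 15] always holds the word the current round needs. A stand-alone sketch of that rolling schedule (schedule only; the round function and state update are omitted):

/*
 * Rolling 16-word SHA-512 message schedule, as used by the reworked
 * sha512_transform() above.  Stand-alone user-space sketch.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static uint64_t ror64(uint64_t x, unsigned int n)
{
	return (x >> n) | (x << (64 - n));
}

#define s0(x) (ror64((x), 1) ^ ror64((x), 8) ^ ((x) >> 7))
#define s1(x) (ror64((x), 19) ^ ror64((x), 61) ^ ((x) >> 6))

/* Load one big-endian 64-bit word of the input block. */
static uint64_t load_be64(const uint8_t *p)
{
	uint64_t v = 0;

	for (int i = 0; i < 8; i++)
		v = (v << 8) | p[i];
	return v;
}

int main(void)
{
	uint8_t block[128];
	uint64_t W[16];		/* the whole schedule lives in 16 words */

	memset(block, 0x61, sizeof(block));
	for (int i = 0; i < 16; i++)
		W[i] = load_be64(block + 8 * i);

	/*
	 * W_i for i >= 16 replaces W_(i-16) in the same slot, so the
	 * "+=" below is exactly the standard recurrence
	 * W_i = s1(W_(i-2)) + W_(i-7) + s0(W_(i-15)) + W_(i-16).
	 */
	for (int i = 16; i < 80; i++)
		W[i & 15] += s1(W[(i - 2) & 15]) + W[(i - 7) & 15] +
			     s0(W[(i - 15) & 15]);

	printf("W79 = %016llx\n", (unsigned long long)W[79 & 15]);
	return 0;
}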
diff --git a/crypto/shash.c b/crypto/shash.c
index d5a2b619c55..47c713954bf 100644
--- a/crypto/shash.c
+++ b/crypto/shash.c
@@ -17,16 +17,19 @@
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
+
+#include "internal.h"
static const struct crypto_type crypto_shash_type;
-static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
+static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
+ unsigned int keylen)
{
- return container_of(tfm, struct crypto_shash, base);
+ return -ENOSYS;
}
-#include "internal.h"
-
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
unsigned int keylen)
{
@@ -36,7 +39,7 @@ static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
u8 *buffer, *alignbuffer;
int err;
- absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
+ absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
buffer = kmalloc(absize, GFP_KERNEL);
if (!buffer)
return -ENOMEM;
@@ -44,8 +47,7 @@ static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
memcpy(alignbuffer, key, keylen);
err = shash->setkey(tfm, alignbuffer, keylen);
- memset(alignbuffer, 0, keylen);
- kfree(buffer);
+ kzfree(buffer);
return err;
}
@@ -55,9 +57,6 @@ int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
struct shash_alg *shash = crypto_shash_alg(tfm);
unsigned long alignmask = crypto_shash_alignmask(tfm);
- if (!shash->setkey)
- return -ENOSYS;
-
if ((unsigned long)key & alignmask)
return shash_setkey_unaligned(tfm, key, keylen);
@@ -68,7 +67,8 @@ EXPORT_SYMBOL_GPL(crypto_shash_setkey);
static inline unsigned int shash_align_buffer_size(unsigned len,
unsigned long mask)
{
- return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
+ typedef u8 __attribute__ ((aligned)) u8_aligned;
+ return len + (mask & ~(__alignof__(u8_aligned) - 1));
}
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
@@ -79,12 +79,19 @@ static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
unsigned long alignmask = crypto_shash_alignmask(tfm);
unsigned int unaligned_len = alignmask + 1 -
((unsigned long)data & alignmask);
- u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
+ u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
__attribute__ ((aligned));
+ u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
+ int err;
+
+ if (unaligned_len > len)
+ unaligned_len = len;
memcpy(buf, data, unaligned_len);
+ err = shash->update(desc, buf, unaligned_len);
+ memset(buf, 0, unaligned_len);
- return shash->update(desc, buf, unaligned_len) ?:
+ return err ?:
shash->update(desc, data + unaligned_len, len - unaligned_len);
}
@@ -108,12 +115,19 @@ static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
unsigned long alignmask = crypto_shash_alignmask(tfm);
struct shash_alg *shash = crypto_shash_alg(tfm);
unsigned int ds = crypto_shash_digestsize(tfm);
- u8 buf[shash_align_buffer_size(ds, alignmask)]
+ u8 ubuf[shash_align_buffer_size(ds, alignmask)]
__attribute__ ((aligned));
+ u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
int err;
err = shash->final(desc, buf);
+ if (err)
+ goto out;
+
memcpy(out, buf, ds);
+
+out:
+ memset(buf, 0, ds);
return err;
}
@@ -144,8 +158,7 @@ int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
struct shash_alg *shash = crypto_shash_alg(tfm);
unsigned long alignmask = crypto_shash_alignmask(tfm);
- if (((unsigned long)data | (unsigned long)out) & alignmask ||
- !shash->finup)
+ if (((unsigned long)data | (unsigned long)out) & alignmask)
return shash_finup_unaligned(desc, data, len, out);
return shash->finup(desc, data, len, out);
@@ -156,8 +169,7 @@ static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
unsigned int len, u8 *out)
{
return crypto_shash_init(desc) ?:
- crypto_shash_update(desc, data, len) ?:
- crypto_shash_final(desc, out);
+ crypto_shash_finup(desc, data, len, out);
}
int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
@@ -167,27 +179,24 @@ int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
struct shash_alg *shash = crypto_shash_alg(tfm);
unsigned long alignmask = crypto_shash_alignmask(tfm);
- if (((unsigned long)data | (unsigned long)out) & alignmask ||
- !shash->digest)
+ if (((unsigned long)data | (unsigned long)out) & alignmask)
return shash_digest_unaligned(desc, data, len, out);
return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
-int crypto_shash_import(struct shash_desc *desc, const u8 *in)
+static int shash_default_export(struct shash_desc *desc, void *out)
{
- struct crypto_shash *tfm = desc->tfm;
- struct shash_alg *alg = crypto_shash_alg(tfm);
-
- memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));
-
- if (alg->reinit)
- alg->reinit(desc);
+ memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
+ return 0;
+}
+static int shash_default_import(struct shash_desc *desc, const void *in)
+{
+ memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
return 0;
}
-EXPORT_SYMBOL_GPL(crypto_shash_import);
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
unsigned int keylen)
@@ -208,9 +217,8 @@ static int shash_async_init(struct ahash_request *req)
return crypto_shash_init(desc);
}
-static int shash_async_update(struct ahash_request *req)
+int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
- struct shash_desc *desc = ahash_request_ctx(req);
struct crypto_hash_walk walk;
int nbytes;
@@ -220,13 +228,51 @@ static int shash_async_update(struct ahash_request *req)
return nbytes;
}
+EXPORT_SYMBOL_GPL(shash_ahash_update);
+
+static int shash_async_update(struct ahash_request *req)
+{
+ return shash_ahash_update(req, ahash_request_ctx(req));
+}
static int shash_async_final(struct ahash_request *req)
{
return crypto_shash_final(ahash_request_ctx(req), req->result);
}
-static int shash_async_digest(struct ahash_request *req)
+int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
+{
+ struct crypto_hash_walk walk;
+ int nbytes;
+
+ nbytes = crypto_hash_walk_first(req, &walk);
+ if (!nbytes)
+ return crypto_shash_final(desc, req->result);
+
+ do {
+ nbytes = crypto_hash_walk_last(&walk) ?
+ crypto_shash_finup(desc, walk.data, nbytes,
+ req->result) :
+ crypto_shash_update(desc, walk.data, nbytes);
+ nbytes = crypto_hash_walk_done(&walk, nbytes);
+ } while (nbytes > 0);
+
+ return nbytes;
+}
+EXPORT_SYMBOL_GPL(shash_ahash_finup);
+
+static int shash_async_finup(struct ahash_request *req)
+{
+ struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
+ struct shash_desc *desc = ahash_request_ctx(req);
+
+ desc->tfm = *ctx;
+ desc->flags = req->base.flags;
+
+ return shash_ahash_finup(req, desc);
+}
+
+int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
struct scatterlist *sg = req->src;
unsigned int offset = sg->offset;
@@ -234,34 +280,46 @@ static int shash_async_digest(struct ahash_request *req)
int err;
if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
- struct crypto_shash **ctx =
- crypto_ahash_ctx(crypto_ahash_reqtfm(req));
- struct shash_desc *desc = ahash_request_ctx(req);
void *data;
- desc->tfm = *ctx;
- desc->flags = req->base.flags;
-
- data = crypto_kmap(sg_page(sg), 0);
+ data = kmap_atomic(sg_page(sg));
err = crypto_shash_digest(desc, data + offset, nbytes,
req->result);
- crypto_kunmap(data, 0);
+ kunmap_atomic(data);
crypto_yield(desc->flags);
- goto out;
- }
+ } else
+ err = crypto_shash_init(desc) ?:
+ shash_ahash_finup(req, desc);
- err = shash_async_init(req);
- if (err)
- goto out;
+ return err;
+}
+EXPORT_SYMBOL_GPL(shash_ahash_digest);
- err = shash_async_update(req);
- if (err)
- goto out;
+static int shash_async_digest(struct ahash_request *req)
+{
+ struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
+ struct shash_desc *desc = ahash_request_ctx(req);
- err = shash_async_final(req);
+ desc->tfm = *ctx;
+ desc->flags = req->base.flags;
-out:
- return err;
+ return shash_ahash_digest(req, desc);
+}
+
+static int shash_async_export(struct ahash_request *req, void *out)
+{
+ return crypto_shash_export(ahash_request_ctx(req), out);
+}
+
+static int shash_async_import(struct ahash_request *req, const void *in)
+{
+ struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
+ struct shash_desc *desc = ahash_request_ctx(req);
+
+ desc->tfm = *ctx;
+ desc->flags = req->base.flags;
+
+ return crypto_shash_import(desc, in);
}
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
@@ -271,19 +329,18 @@ static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
crypto_free_shash(*ctx);
}
-static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
+int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
struct crypto_alg *calg = tfm->__crt_alg;
struct shash_alg *alg = __crypto_shash_alg(calg);
- struct ahash_tfm *crt = &tfm->crt_ahash;
+ struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
struct crypto_shash *shash;
if (!crypto_mod_get(calg))
return -EAGAIN;
- shash = __crypto_shash_cast(crypto_create_tfm(
- calg, &crypto_shash_type));
+ shash = crypto_create_tfm(calg, &crypto_shash_type);
if (IS_ERR(shash)) {
crypto_mod_put(calg);
return PTR_ERR(shash);
@@ -294,11 +351,17 @@ static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
crt->init = shash_async_init;
crt->update = shash_async_update;
- crt->final = shash_async_final;
+ crt->final = shash_async_final;
+ crt->finup = shash_async_finup;
crt->digest = shash_async_digest;
- crt->setkey = shash_async_setkey;
- crt->digestsize = alg->digestsize;
+ if (alg->setkey)
+ crt->setkey = shash_async_setkey;
+ if (alg->export)
+ crt->export = shash_async_export;
+ if (alg->import)
+ crt->import = shash_async_import;
+
crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
return 0;
@@ -307,14 +370,16 @@ static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
unsigned int keylen)
{
- struct shash_desc *desc = crypto_hash_ctx(tfm);
+ struct shash_desc **descp = crypto_hash_ctx(tfm);
+ struct shash_desc *desc = *descp;
return crypto_shash_setkey(desc->tfm, key, keylen);
}
static int shash_compat_init(struct hash_desc *hdesc)
{
- struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
+ struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
+ struct shash_desc *desc = *descp;
desc->flags = hdesc->flags;
@@ -324,7 +389,8 @@ static int shash_compat_init(struct hash_desc *hdesc)
static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
unsigned int len)
{
- struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
+ struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
+ struct shash_desc *desc = *descp;
struct crypto_hash_walk walk;
int nbytes;
@@ -337,7 +403,9 @@ static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
- return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
+ struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
+
+ return crypto_shash_final(*descp, out);
}
static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
@@ -347,14 +415,15 @@ static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
int err;
if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
- struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
+ struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
+ struct shash_desc *desc = *descp;
void *data;
desc->flags = hdesc->flags;
- data = crypto_kmap(sg_page(sg), 0);
+ data = kmap_atomic(sg_page(sg));
err = crypto_shash_digest(desc, data + offset, nbytes, out);
- crypto_kunmap(data, 0);
+ kunmap_atomic(data);
crypto_yield(desc->flags);
goto out;
}
@@ -375,9 +444,11 @@ out:
static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
- struct shash_desc *desc= crypto_tfm_ctx(tfm);
+ struct shash_desc **descp = crypto_tfm_ctx(tfm);
+ struct shash_desc *desc = *descp;
crypto_free_shash(desc->tfm);
+ kzfree(desc);
}
static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
@@ -385,19 +456,27 @@ static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
struct hash_tfm *crt = &tfm->crt_hash;
struct crypto_alg *calg = tfm->__crt_alg;
struct shash_alg *alg = __crypto_shash_alg(calg);
- struct shash_desc *desc = crypto_tfm_ctx(tfm);
+ struct shash_desc **descp = crypto_tfm_ctx(tfm);
struct crypto_shash *shash;
+ struct shash_desc *desc;
if (!crypto_mod_get(calg))
return -EAGAIN;
- shash = __crypto_shash_cast(crypto_create_tfm(
- calg, &crypto_shash_type));
+ shash = crypto_create_tfm(calg, &crypto_shash_type);
if (IS_ERR(shash)) {
crypto_mod_put(calg);
return PTR_ERR(shash);
}
+ desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
+ GFP_KERNEL);
+ if (!desc) {
+ crypto_free_shash(shash);
+ return -ENOMEM;
+ }
+
+ *descp = desc;
desc->tfm = shash;
tfm->exit = crypto_exit_shash_ops_compat;
@@ -417,8 +496,6 @@ static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
switch (mask & CRYPTO_ALG_TYPE_MASK) {
case CRYPTO_ALG_TYPE_HASH_MASK:
return crypto_init_shash_ops_compat(tfm);
- case CRYPTO_ALG_TYPE_AHASH_MASK:
- return crypto_init_shash_ops_async(tfm);
}
return -EINVAL;
@@ -427,32 +504,53 @@ static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
u32 mask)
{
- struct shash_alg *salg = __crypto_shash_alg(alg);
-
switch (mask & CRYPTO_ALG_TYPE_MASK) {
case CRYPTO_ALG_TYPE_HASH_MASK:
- return sizeof(struct shash_desc) + salg->descsize;
- case CRYPTO_ALG_TYPE_AHASH_MASK:
- return sizeof(struct crypto_shash *);
+ return sizeof(struct shash_desc *);
}
return 0;
}
-static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
- const struct crypto_type *frontend)
+static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
- if (frontend->type != CRYPTO_ALG_TYPE_SHASH)
- return -EINVAL;
+ struct crypto_shash *hash = __crypto_shash_cast(tfm);
+
+ hash->descsize = crypto_shash_alg(hash)->descsize;
return 0;
}
-static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
- const struct crypto_type *frontend)
+static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
{
return alg->cra_ctxsize;
}
+#ifdef CONFIG_NET
+static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_hash rhash;
+ struct shash_alg *salg = __crypto_shash_alg(alg);
+
+ strncpy(rhash.type, "shash", sizeof(rhash.type));
+
+ rhash.blocksize = alg->cra_blocksize;
+ rhash.digestsize = salg->digestsize;
+
+ if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
+ sizeof(struct crypto_report_hash), &rhash))
+ goto nla_put_failure;
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
@@ -462,7 +560,6 @@ static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
seq_printf(m, "type : shash\n");
seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
seq_printf(m, "digestsize : %u\n", salg->digestsize);
- seq_printf(m, "descsize : %u\n", salg->descsize);
}
static const struct crypto_type crypto_shash_type = {
@@ -473,6 +570,7 @@ static const struct crypto_type crypto_shash_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_shash_show,
#endif
+ .report = crypto_shash_report,
.maskclear = ~CRYPTO_ALG_TYPE_MASK,
.maskset = CRYPTO_ALG_TYPE_MASK,
.type = CRYPTO_ALG_TYPE_SHASH,
@@ -482,23 +580,47 @@ static const struct crypto_type crypto_shash_type = {
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
u32 mask)
{
- return __crypto_shash_cast(
- crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask));
+ return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
-int crypto_register_shash(struct shash_alg *alg)
+static int shash_prepare_alg(struct shash_alg *alg)
{
struct crypto_alg *base = &alg->base;
if (alg->digestsize > PAGE_SIZE / 8 ||
- alg->descsize > PAGE_SIZE / 8)
+ alg->descsize > PAGE_SIZE / 8 ||
+ alg->statesize > PAGE_SIZE / 8)
return -EINVAL;
base->cra_type = &crypto_shash_type;
base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
+ if (!alg->finup)
+ alg->finup = shash_finup_unaligned;
+ if (!alg->digest)
+ alg->digest = shash_digest_unaligned;
+ if (!alg->export) {
+ alg->export = shash_default_export;
+ alg->import = shash_default_import;
+ alg->statesize = alg->descsize;
+ }
+ if (!alg->setkey)
+ alg->setkey = shash_no_setkey;
+
+ return 0;
+}
+
+int crypto_register_shash(struct shash_alg *alg)
+{
+ struct crypto_alg *base = &alg->base;
+ int err;
+
+ err = shash_prepare_alg(alg);
+ if (err)
+ return err;
+
return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
@@ -509,5 +631,80 @@ int crypto_unregister_shash(struct shash_alg *alg)
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
+int crypto_register_shashes(struct shash_alg *algs, int count)
+{
+ int i, ret;
+
+ for (i = 0; i < count; i++) {
+ ret = crypto_register_shash(&algs[i]);
+ if (ret)
+ goto err;
+ }
+
+ return 0;
+
+err:
+ for (--i; i >= 0; --i)
+ crypto_unregister_shash(&algs[i]);
+
+ return ret;
+}
+EXPORT_SYMBOL_GPL(crypto_register_shashes);
+
+int crypto_unregister_shashes(struct shash_alg *algs, int count)
+{
+ int i, ret;
+
+ for (i = count - 1; i >= 0; --i) {
+ ret = crypto_unregister_shash(&algs[i]);
+ if (ret)
+ pr_err("Failed to unregister %s %s: %d\n",
+ algs[i].base.cra_driver_name,
+ algs[i].base.cra_name, ret);
+ }
+
+ return 0;
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
+
+int shash_register_instance(struct crypto_template *tmpl,
+ struct shash_instance *inst)
+{
+ int err;
+
+ err = shash_prepare_alg(&inst->alg);
+ if (err)
+ return err;
+
+ return crypto_register_instance(tmpl, shash_crypto_instance(inst));
+}
+EXPORT_SYMBOL_GPL(shash_register_instance);
+
+void shash_free_instance(struct crypto_instance *inst)
+{
+ crypto_drop_spawn(crypto_instance_ctx(inst));
+ kfree(shash_instance(inst));
+}
+EXPORT_SYMBOL_GPL(shash_free_instance);
+
+int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
+ struct shash_alg *alg,
+ struct crypto_instance *inst)
+{
+ return crypto_init_spawn2(&spawn->base, &alg->base, inst,
+ &crypto_shash_type);
+}
+EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
+
+struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
+{
+ struct crypto_alg *alg;
+
+ alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
+ return IS_ERR(alg) ? ERR_CAST(alg) :
+ container_of(alg, struct shash_alg, base);
+}
+EXPORT_SYMBOL_GPL(shash_attr_alg);
+
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");
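One detail of the shash changes above worth noting: shash_update_unaligned() and shash_final_unaligned() now over-allocate their on-stack bounce buffer by the algorithm's alignment mask and bump the working pointer up with PTR_ALIGN(), so the data handed to the implementation is always suitably aligned and can be wiped afterwards. A user-space sketch of that pointer-alignment trick, where ptr_align() is an illustrative stand-in with the same round-up semantics as the kernel's PTR_ALIGN():

/*
 * Align-up-within-an-oversized-buffer trick used by the unaligned
 * shash helpers above.  User-space sketch with illustrative names.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Round a pointer up to the next multiple of 'a' (a power of two). */
static void *ptr_align(void *p, uintptr_t a)
{
	return (void *)(((uintptr_t)p + a - 1) & ~(a - 1));
}

int main(void)
{
	unsigned long alignmask = 15;	/* e.g. a driver wanting 16-byte alignment */
	unsigned int len = 20;		/* bytes the helper actually needs */

	/* Pad the raw buffer by alignmask so an aligned window of 'len' fits. */
	uint8_t raw[20 + 15];
	uint8_t *buf = ptr_align(raw, alignmask + 1);

	memset(buf, 0, len);		/* the helpers also wipe it when done */
	printf("raw=%p aligned=%p offset=%td\n",
	       (void *)raw, (void *)buf, buf - raw);
	return 0;
}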
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index 28a45a1e6f4..ba247cf3085 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -8,6 +8,13 @@
* Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
* Copyright (c) 2007 Nokia Siemens Networks
*
+ * Updated RFC4106 AES-GCM testing.
+ * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
+ * Adrian Hoban <adrian.hoban@intel.com>
+ * Gabriele Paoloni <gabriele.paoloni@intel.com>
+ * Tadeusz Struk (tadeusz.struk@intel.com)
+ * Copyright (c) 2010, Intel Corporation.
+ *
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
@@ -18,8 +25,8 @@
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/init.h>
+#include <linux/gfp.h>
#include <linux/module.h>
-#include <linux/slab.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/moduleparam.h>
@@ -27,6 +34,7 @@
#include <linux/timex.h>
#include <linux/interrupt.h>
#include "tcrypt.h"
+#include "internal.h"
/*
* Need slab memory for testing (size in number of pages).
@@ -44,6 +52,9 @@
*/
static unsigned int sec;
+static char *alg = NULL;
+static u32 type;
+static u32 mask;
static int mode;
static char *tvmem[TVMEMSIZE];
@@ -53,7 +64,7 @@ static char *check[] = {
"cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
"khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
"camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
- "lzo", "cts", NULL
+ "lzo", "cts", "zlib", NULL
};
static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc,
@@ -86,7 +97,6 @@ static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
int ret = 0;
int i;
- local_bh_disable();
local_irq_disable();
/* Warm-up run. */
@@ -119,7 +129,75 @@ static int test_cipher_cycles(struct blkcipher_desc *desc, int enc,
out:
local_irq_enable();
- local_bh_enable();
+
+ if (ret == 0)
+ printk("1 operation in %lu cycles (%d bytes)\n",
+ (cycles + 4) / 8, blen);
+
+ return ret;
+}
+
+static int test_aead_jiffies(struct aead_request *req, int enc,
+ int blen, int sec)
+{
+ unsigned long start, end;
+ int bcount;
+ int ret;
+
+ for (start = jiffies, end = start + sec * HZ, bcount = 0;
+ time_before(jiffies, end); bcount++) {
+ if (enc)
+ ret = crypto_aead_encrypt(req);
+ else
+ ret = crypto_aead_decrypt(req);
+
+ if (ret)
+ return ret;
+ }
+
+ printk("%d operations in %d seconds (%ld bytes)\n",
+ bcount, sec, (long)bcount * blen);
+ return 0;
+}
+
+static int test_aead_cycles(struct aead_request *req, int enc, int blen)
+{
+ unsigned long cycles = 0;
+ int ret = 0;
+ int i;
+
+ local_irq_disable();
+
+ /* Warm-up run. */
+ for (i = 0; i < 4; i++) {
+ if (enc)
+ ret = crypto_aead_encrypt(req);
+ else
+ ret = crypto_aead_decrypt(req);
+
+ if (ret)
+ goto out;
+ }
+
+ /* The real thing. */
+ for (i = 0; i < 8; i++) {
+ cycles_t start, end;
+
+ start = get_cycles();
+ if (enc)
+ ret = crypto_aead_encrypt(req);
+ else
+ ret = crypto_aead_decrypt(req);
+ end = get_cycles();
+
+ if (ret)
+ goto out;
+
+ cycles += end - start;
+ }
+
+out:
+ local_irq_enable();
if (ret == 0)
printk("1 operation in %lu cycles (%d bytes)\n",
@@ -129,13 +207,211 @@ out:
}
static u32 block_sizes[] = { 16, 64, 256, 1024, 8192, 0 };
+static u32 aead_sizes[] = { 16, 64, 256, 512, 1024, 2048, 4096, 8192, 0 };
+
+#define XBUFSIZE 8
+#define MAX_IVLEN 32
+
+static int testmgr_alloc_buf(char *buf[XBUFSIZE])
+{
+ int i;
+
+ for (i = 0; i < XBUFSIZE; i++) {
+ buf[i] = (void *)__get_free_page(GFP_KERNEL);
+ if (!buf[i])
+ goto err_free_buf;
+ }
+
+ return 0;
+
+err_free_buf:
+ while (i-- > 0)
+ free_page((unsigned long)buf[i]);
+
+ return -ENOMEM;
+}
+
+static void testmgr_free_buf(char *buf[XBUFSIZE])
+{
+ int i;
+
+ for (i = 0; i < XBUFSIZE; i++)
+ free_page((unsigned long)buf[i]);
+}
+
+static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
+ unsigned int buflen)
+{
+ int np = (buflen + PAGE_SIZE - 1)/PAGE_SIZE;
+ int k, rem;
+
+	if (np > XBUFSIZE) {
+		rem = PAGE_SIZE;
+		np = XBUFSIZE;
+	} else {
+		rem = buflen % PAGE_SIZE;
+	}
+ sg_init_table(sg, np);
+ for (k = 0; k < np; ++k) {
+ if (k == (np-1))
+ sg_set_buf(&sg[k], xbuf[k], rem);
+ else
+ sg_set_buf(&sg[k], xbuf[k], PAGE_SIZE);
+ }
+}
+
+static void test_aead_speed(const char *algo, int enc, unsigned int sec,
+ struct aead_speed_template *template,
+ unsigned int tcount, u8 authsize,
+ unsigned int aad_size, u8 *keysize)
+{
+ unsigned int i, j;
+ struct crypto_aead *tfm;
+ int ret = -ENOMEM;
+ const char *key;
+ struct aead_request *req;
+ struct scatterlist *sg;
+ struct scatterlist *asg;
+ struct scatterlist *sgout;
+ const char *e;
+ void *assoc;
+ char iv[MAX_IVLEN];
+ char *xbuf[XBUFSIZE];
+ char *xoutbuf[XBUFSIZE];
+ char *axbuf[XBUFSIZE];
+ unsigned int *b_size;
+ unsigned int iv_len;
+
+ if (aad_size >= PAGE_SIZE) {
+ pr_err("associate data length (%u) too big\n", aad_size);
+ return;
+ }
+
+ if (enc == ENCRYPT)
+ e = "encryption";
+ else
+ e = "decryption";
+
+ if (testmgr_alloc_buf(xbuf))
+ goto out_noxbuf;
+ if (testmgr_alloc_buf(axbuf))
+ goto out_noaxbuf;
+ if (testmgr_alloc_buf(xoutbuf))
+ goto out_nooutbuf;
+
+ sg = kmalloc(sizeof(*sg) * 8 * 3, GFP_KERNEL);
+ if (!sg)
+ goto out_nosg;
+ asg = &sg[8];
+ sgout = &asg[8];
+
+
+ printk(KERN_INFO "\ntesting speed of %s %s\n", algo, e);
+
+ tfm = crypto_alloc_aead(algo, 0, 0);
+
+ if (IS_ERR(tfm)) {
+ pr_err("alg: aead: Failed to load transform for %s: %ld\n", algo,
+ PTR_ERR(tfm));
+ goto out_notfm;
+ }
+
+ req = aead_request_alloc(tfm, GFP_KERNEL);
+ if (!req) {
+ pr_err("alg: aead: Failed to allocate request for %s\n",
+ algo);
+ goto out_noreq;
+ }
+
+ i = 0;
+ do {
+ b_size = aead_sizes;
+ do {
+ assoc = axbuf[0];
+ memset(assoc, 0xff, aad_size);
+ sg_init_one(&asg[0], assoc, aad_size);
+
+ if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
+ pr_err("template (%u) too big for tvmem (%lu)\n",
+ *keysize + *b_size,
+ TVMEMSIZE * PAGE_SIZE);
+ goto out;
+ }
+
+ key = tvmem[0];
+ for (j = 0; j < tcount; j++) {
+ if (template[j].klen == *keysize) {
+ key = template[j].key;
+ break;
+ }
+ }
+ ret = crypto_aead_setkey(tfm, key, *keysize);
+ ret = crypto_aead_setauthsize(tfm, authsize);
+
+ iv_len = crypto_aead_ivsize(tfm);
+ if (iv_len)
+ memset(&iv, 0xff, iv_len);
+
+ crypto_aead_clear_flags(tfm, ~0);
+ printk(KERN_INFO "test %u (%d bit key, %d byte blocks): ",
+ i, *keysize * 8, *b_size);
+
+
+ memset(tvmem[0], 0xff, PAGE_SIZE);
+
+ if (ret) {
+ pr_err("setkey() failed flags=%x\n",
+ crypto_aead_get_flags(tfm));
+ goto out;
+ }
+
+ sg_init_aead(&sg[0], xbuf,
+ *b_size + (enc ? authsize : 0));
+
+ sg_init_aead(&sgout[0], xoutbuf,
+ *b_size + (enc ? authsize : 0));
+
+ aead_request_set_crypt(req, sg, sgout, *b_size, iv);
+ aead_request_set_assoc(req, asg, aad_size);
+
+ if (sec)
+ ret = test_aead_jiffies(req, enc, *b_size, sec);
+ else
+ ret = test_aead_cycles(req, enc, *b_size);
+
+ if (ret) {
+ pr_err("%s() failed return code=%d\n", e, ret);
+ break;
+ }
+ b_size++;
+ i++;
+ } while (*b_size);
+ keysize++;
+ } while (*keysize);
+
+out:
+ aead_request_free(req);
+out_noreq:
+ crypto_free_aead(tfm);
+out_notfm:
+ kfree(sg);
+out_nosg:
+ testmgr_free_buf(xoutbuf);
+out_nooutbuf:
+ testmgr_free_buf(axbuf);
+out_noaxbuf:
+ testmgr_free_buf(xbuf);
+out_noxbuf:
+ return;
+}
static void test_cipher_speed(const char *algo, int enc, unsigned int sec,
struct cipher_speed_template *template,
unsigned int tcount, u8 *keysize)
{
unsigned int ret, i, j, iv_len;
- const char *key, iv[128];
+ const char *key;
+ char iv[128];
struct crypto_blkcipher *tfm;
struct blkcipher_desc desc;
const char *e;
@@ -288,7 +564,6 @@ static int test_hash_cycles_digest(struct hash_desc *desc,
int i;
int ret;
- local_bh_disable();
local_irq_disable();
/* Warm-up run. */
@@ -315,7 +590,6 @@ static int test_hash_cycles_digest(struct hash_desc *desc,
out:
local_irq_enable();
- local_bh_enable();
if (ret)
return ret;
@@ -336,7 +610,6 @@ static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
if (plen == blen)
return test_hash_cycles_digest(desc, sg, blen, out);
- local_bh_disable();
local_irq_disable();
/* Warm-up run. */
@@ -379,7 +652,6 @@ static int test_hash_cycles(struct hash_desc *desc, struct scatterlist *sg,
out:
local_irq_enable();
- local_bh_enable();
if (ret)
return ret;
@@ -390,22 +662,33 @@ out:
return 0;
}
+static void test_hash_sg_init(struct scatterlist *sg)
+{
+ int i;
+
+ sg_init_table(sg, TVMEMSIZE);
+ for (i = 0; i < TVMEMSIZE; i++) {
+ sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
+ memset(tvmem[i], 0xff, PAGE_SIZE);
+ }
+}
+
static void test_hash_speed(const char *algo, unsigned int sec,
struct hash_speed *speed)
{
struct scatterlist sg[TVMEMSIZE];
struct crypto_hash *tfm;
struct hash_desc desc;
- char output[1024];
+ static char output[1024];
int i;
int ret;
- printk("\ntesting speed of %s\n", algo);
+ printk(KERN_INFO "\ntesting speed of %s\n", algo);
tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
if (IS_ERR(tfm)) {
- printk("failed to load transform for %s: %ld\n", algo,
+ printk(KERN_ERR "failed to load transform for %s: %ld\n", algo,
PTR_ERR(tfm));
return;
}
@@ -414,25 +697,25 @@ static void test_hash_speed(const char *algo, unsigned int sec,
desc.flags = 0;
if (crypto_hash_digestsize(tfm) > sizeof(output)) {
- printk("digestsize(%u) > outputbuffer(%zu)\n",
+ printk(KERN_ERR "digestsize(%u) > outputbuffer(%zu)\n",
crypto_hash_digestsize(tfm), sizeof(output));
goto out;
}
- sg_init_table(sg, TVMEMSIZE);
- for (i = 0; i < TVMEMSIZE; i++) {
- sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
- memset(tvmem[i], 0xff, PAGE_SIZE);
- }
-
+ test_hash_sg_init(sg);
for (i = 0; speed[i].blen != 0; i++) {
if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
- printk("template (%u) too big for tvmem (%lu)\n",
+ printk(KERN_ERR
+ "template (%u) too big for tvmem (%lu)\n",
speed[i].blen, TVMEMSIZE * PAGE_SIZE);
goto out;
}
- printk("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ",
+ if (speed[i].klen)
+ crypto_hash_setkey(tfm, tvmem[0], speed[i].klen);
+
+ printk(KERN_INFO "test%3u "
+ "(%5u byte blocks,%5u bytes per update,%4u updates): ",
i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
if (sec)
@@ -443,7 +726,7 @@ static void test_hash_speed(const char *algo, unsigned int sec,
speed[i].plen, output);
if (ret) {
- printk("hashing failed ret=%d\n", ret);
+ printk(KERN_ERR "hashing failed ret=%d\n", ret);
break;
}
}
@@ -452,6 +735,463 @@ out:
crypto_free_hash(tfm);
}
+struct tcrypt_result {
+ struct completion completion;
+ int err;
+};
+
+static void tcrypt_complete(struct crypto_async_request *req, int err)
+{
+ struct tcrypt_result *res = req->data;
+
+ if (err == -EINPROGRESS)
+ return;
+
+ res->err = err;
+ complete(&res->completion);
+}
+
+static inline int do_one_ahash_op(struct ahash_request *req, int ret)
+{
+ if (ret == -EINPROGRESS || ret == -EBUSY) {
+ struct tcrypt_result *tr = req->base.data;
+
+ ret = wait_for_completion_interruptible(&tr->completion);
+ if (!ret)
+ ret = tr->err;
+ reinit_completion(&tr->completion);
+ }
+ return ret;
+}
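/*
 * Illustrative sketch, not part of the patch: how the completion-based
 * helpers above fit together.  The request callback is pointed at
 * tcrypt_complete() with a struct tcrypt_result as context; when a
 * driver returns -EINPROGRESS or -EBUSY, do_one_ahash_op() sleeps on
 * the completion until the callback fires, picks up the real status
 * from tr->err and re-arms the completion for the next operation.
 * Assumes <crypto/hash.h>.
 */
static int sketch_one_async_digest(struct crypto_ahash *tfm,
				   struct scatterlist *sg,
				   unsigned int nbytes, u8 *out)
{
	struct tcrypt_result tr;
	struct ahash_request *req;
	int ret;

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	init_completion(&tr.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tr);
	ahash_request_set_crypt(req, sg, out, nbytes);

	/* synchronous drivers return 0/-errno directly; async drivers
	 * go through the completion path inside do_one_ahash_op() */
	ret = do_one_ahash_op(req, crypto_ahash_digest(req));

	ahash_request_free(req);
	return ret;
}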
+
+static int test_ahash_jiffies_digest(struct ahash_request *req, int blen,
+ char *out, int sec)
+{
+ unsigned long start, end;
+ int bcount;
+ int ret;
+
+ for (start = jiffies, end = start + sec * HZ, bcount = 0;
+ time_before(jiffies, end); bcount++) {
+ ret = do_one_ahash_op(req, crypto_ahash_digest(req));
+ if (ret)
+ return ret;
+ }
+
+	pr_cont("%6u opers/sec, %9lu bytes/sec\n",
+		bcount / sec, ((long)bcount * blen) / sec);
+
+ return 0;
+}
+
+static int test_ahash_jiffies(struct ahash_request *req, int blen,
+ int plen, char *out, int sec)
+{
+ unsigned long start, end;
+ int bcount, pcount;
+ int ret;
+
+ if (plen == blen)
+ return test_ahash_jiffies_digest(req, blen, out, sec);
+
+ for (start = jiffies, end = start + sec * HZ, bcount = 0;
+ time_before(jiffies, end); bcount++) {
+ ret = crypto_ahash_init(req);
+ if (ret)
+ return ret;
+ for (pcount = 0; pcount < blen; pcount += plen) {
+ ret = do_one_ahash_op(req, crypto_ahash_update(req));
+ if (ret)
+ return ret;
+ }
+ /* we assume there is enough space in 'out' for the result */
+ ret = do_one_ahash_op(req, crypto_ahash_final(req));
+ if (ret)
+ return ret;
+ }
+
+ pr_cont("%6u opers/sec, %9lu bytes/sec\n",
+ bcount / sec, ((long)bcount * blen) / sec);
+
+ return 0;
+}
+
+static int test_ahash_cycles_digest(struct ahash_request *req, int blen,
+ char *out)
+{
+ unsigned long cycles = 0;
+ int ret, i;
+
+ /* Warm-up run. */
+ for (i = 0; i < 4; i++) {
+ ret = do_one_ahash_op(req, crypto_ahash_digest(req));
+ if (ret)
+ goto out;
+ }
+
+ /* The real thing. */
+ for (i = 0; i < 8; i++) {
+ cycles_t start, end;
+
+ start = get_cycles();
+
+ ret = do_one_ahash_op(req, crypto_ahash_digest(req));
+ if (ret)
+ goto out;
+
+ end = get_cycles();
+
+ cycles += end - start;
+ }
+
+out:
+ if (ret)
+ return ret;
+
+ pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
+ cycles / 8, cycles / (8 * blen));
+
+ return 0;
+}
+
+static int test_ahash_cycles(struct ahash_request *req, int blen,
+ int plen, char *out)
+{
+ unsigned long cycles = 0;
+ int i, pcount, ret;
+
+ if (plen == blen)
+ return test_ahash_cycles_digest(req, blen, out);
+
+ /* Warm-up run. */
+ for (i = 0; i < 4; i++) {
+ ret = crypto_ahash_init(req);
+ if (ret)
+ goto out;
+ for (pcount = 0; pcount < blen; pcount += plen) {
+ ret = do_one_ahash_op(req, crypto_ahash_update(req));
+ if (ret)
+ goto out;
+ }
+ ret = do_one_ahash_op(req, crypto_ahash_final(req));
+ if (ret)
+ goto out;
+ }
+
+ /* The real thing. */
+ for (i = 0; i < 8; i++) {
+ cycles_t start, end;
+
+ start = get_cycles();
+
+ ret = crypto_ahash_init(req);
+ if (ret)
+ goto out;
+ for (pcount = 0; pcount < blen; pcount += plen) {
+ ret = do_one_ahash_op(req, crypto_ahash_update(req));
+ if (ret)
+ goto out;
+ }
+ ret = do_one_ahash_op(req, crypto_ahash_final(req));
+ if (ret)
+ goto out;
+
+ end = get_cycles();
+
+ cycles += end - start;
+ }
+
+out:
+ if (ret)
+ return ret;
+
+ pr_cont("%6lu cycles/operation, %4lu cycles/byte\n",
+ cycles / 8, cycles / (8 * blen));
+
+ return 0;
+}
+
+static void test_ahash_speed(const char *algo, unsigned int sec,
+ struct hash_speed *speed)
+{
+ struct scatterlist sg[TVMEMSIZE];
+ struct tcrypt_result tresult;
+ struct ahash_request *req;
+ struct crypto_ahash *tfm;
+ static char output[1024];
+ int i, ret;
+
+ printk(KERN_INFO "\ntesting speed of async %s\n", algo);
+
+ tfm = crypto_alloc_ahash(algo, 0, 0);
+ if (IS_ERR(tfm)) {
+ pr_err("failed to load transform for %s: %ld\n",
+ algo, PTR_ERR(tfm));
+ return;
+ }
+
+ if (crypto_ahash_digestsize(tfm) > sizeof(output)) {
+ pr_err("digestsize(%u) > outputbuffer(%zu)\n",
+ crypto_ahash_digestsize(tfm), sizeof(output));
+ goto out;
+ }
+
+ test_hash_sg_init(sg);
+ req = ahash_request_alloc(tfm, GFP_KERNEL);
+ if (!req) {
+ pr_err("ahash request allocation failure\n");
+ goto out;
+ }
+
+ init_completion(&tresult.completion);
+ ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
+ tcrypt_complete, &tresult);
+
+ for (i = 0; speed[i].blen != 0; i++) {
+ if (speed[i].blen > TVMEMSIZE * PAGE_SIZE) {
+ pr_err("template (%u) too big for tvmem (%lu)\n",
+ speed[i].blen, TVMEMSIZE * PAGE_SIZE);
+ break;
+ }
+
+ pr_info("test%3u "
+ "(%5u byte blocks,%5u bytes per update,%4u updates): ",
+ i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen);
+
+ ahash_request_set_crypt(req, sg, output, speed[i].plen);
+
+ if (sec)
+ ret = test_ahash_jiffies(req, speed[i].blen,
+ speed[i].plen, output, sec);
+ else
+ ret = test_ahash_cycles(req, speed[i].blen,
+ speed[i].plen, output);
+
+ if (ret) {
+ pr_err("hashing failed ret=%d\n", ret);
+ break;
+ }
+ }
+
+ ahash_request_free(req);
+
+out:
+ crypto_free_ahash(tfm);
+}
+
+static inline int do_one_acipher_op(struct ablkcipher_request *req, int ret)
+{
+ if (ret == -EINPROGRESS || ret == -EBUSY) {
+ struct tcrypt_result *tr = req->base.data;
+
+ ret = wait_for_completion_interruptible(&tr->completion);
+ if (!ret)
+ ret = tr->err;
+ reinit_completion(&tr->completion);
+ }
+
+ return ret;
+}
+
+static int test_acipher_jiffies(struct ablkcipher_request *req, int enc,
+ int blen, int sec)
+{
+ unsigned long start, end;
+ int bcount;
+ int ret;
+
+ for (start = jiffies, end = start + sec * HZ, bcount = 0;
+ time_before(jiffies, end); bcount++) {
+ if (enc)
+ ret = do_one_acipher_op(req,
+ crypto_ablkcipher_encrypt(req));
+ else
+ ret = do_one_acipher_op(req,
+ crypto_ablkcipher_decrypt(req));
+
+ if (ret)
+ return ret;
+ }
+
+ pr_cont("%d operations in %d seconds (%ld bytes)\n",
+ bcount, sec, (long)bcount * blen);
+ return 0;
+}
+
+static int test_acipher_cycles(struct ablkcipher_request *req, int enc,
+ int blen)
+{
+ unsigned long cycles = 0;
+ int ret = 0;
+ int i;
+
+ /* Warm-up run. */
+ for (i = 0; i < 4; i++) {
+ if (enc)
+ ret = do_one_acipher_op(req,
+ crypto_ablkcipher_encrypt(req));
+ else
+ ret = do_one_acipher_op(req,
+ crypto_ablkcipher_decrypt(req));
+
+ if (ret)
+ goto out;
+ }
+
+ /* The real thing. */
+ for (i = 0; i < 8; i++) {
+ cycles_t start, end;
+
+ start = get_cycles();
+ if (enc)
+ ret = do_one_acipher_op(req,
+ crypto_ablkcipher_encrypt(req));
+ else
+ ret = do_one_acipher_op(req,
+ crypto_ablkcipher_decrypt(req));
+ end = get_cycles();
+
+ if (ret)
+ goto out;
+
+ cycles += end - start;
+ }
+
+out:
+ if (ret == 0)
+ pr_cont("1 operation in %lu cycles (%d bytes)\n",
+ (cycles + 4) / 8, blen);
+
+ return ret;
+}
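/*
 * Worked example, not part of the patch: "(cycles + 4) / 8" above is an
 * integer average over the eight timed runs, with +4 (half the divisor)
 * rounding to the nearest cycle instead of truncating.  A stand-alone
 * user-space illustration:
 */
#include <stdio.h>

int main(void)
{
	unsigned long cycles = 1005;			/* sum over 8 timed runs */
	unsigned long truncated = cycles / 8;		/* 125 */
	unsigned long rounded = (cycles + 4) / 8;	/* 126, nearer to 125.625 */

	printf("1 operation in %lu cycles (truncated: %lu)\n",
	       rounded, truncated);
	return 0;
}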
+
+static void test_acipher_speed(const char *algo, int enc, unsigned int sec,
+ struct cipher_speed_template *template,
+ unsigned int tcount, u8 *keysize)
+{
+ unsigned int ret, i, j, k, iv_len;
+ struct tcrypt_result tresult;
+ const char *key;
+ char iv[128];
+ struct ablkcipher_request *req;
+ struct crypto_ablkcipher *tfm;
+ const char *e;
+ u32 *b_size;
+
+ if (enc == ENCRYPT)
+ e = "encryption";
+ else
+ e = "decryption";
+
+ pr_info("\ntesting speed of async %s %s\n", algo, e);
+
+ init_completion(&tresult.completion);
+
+ tfm = crypto_alloc_ablkcipher(algo, 0, 0);
+
+ if (IS_ERR(tfm)) {
+ pr_err("failed to load transform for %s: %ld\n", algo,
+ PTR_ERR(tfm));
+ return;
+ }
+
+ req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
+ if (!req) {
+ pr_err("tcrypt: skcipher: Failed to allocate request for %s\n",
+ algo);
+ goto out;
+ }
+
+ ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
+ tcrypt_complete, &tresult);
+
+ i = 0;
+ do {
+ b_size = block_sizes;
+
+ do {
+ struct scatterlist sg[TVMEMSIZE];
+
+ if ((*keysize + *b_size) > TVMEMSIZE * PAGE_SIZE) {
+ pr_err("template (%u) too big for "
+ "tvmem (%lu)\n", *keysize + *b_size,
+ TVMEMSIZE * PAGE_SIZE);
+ goto out_free_req;
+ }
+
+ pr_info("test %u (%d bit key, %d byte blocks): ", i,
+ *keysize * 8, *b_size);
+
+ memset(tvmem[0], 0xff, PAGE_SIZE);
+
+ /* set key, plain text and IV */
+ key = tvmem[0];
+ for (j = 0; j < tcount; j++) {
+ if (template[j].klen == *keysize) {
+ key = template[j].key;
+ break;
+ }
+ }
+
+ crypto_ablkcipher_clear_flags(tfm, ~0);
+
+ ret = crypto_ablkcipher_setkey(tfm, key, *keysize);
+ if (ret) {
+ pr_err("setkey() failed flags=%x\n",
+ crypto_ablkcipher_get_flags(tfm));
+ goto out_free_req;
+ }
+
+ sg_init_table(sg, TVMEMSIZE);
+
+ k = *keysize + *b_size;
+ if (k > PAGE_SIZE) {
+ sg_set_buf(sg, tvmem[0] + *keysize,
+ PAGE_SIZE - *keysize);
+ k -= PAGE_SIZE;
+ j = 1;
+ while (k > PAGE_SIZE) {
+ sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
+ memset(tvmem[j], 0xff, PAGE_SIZE);
+ j++;
+ k -= PAGE_SIZE;
+ }
+ sg_set_buf(sg + j, tvmem[j], k);
+ memset(tvmem[j], 0xff, k);
+ } else {
+ sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
+ }
+
+ iv_len = crypto_ablkcipher_ivsize(tfm);
+ if (iv_len)
+ memset(&iv, 0xff, iv_len);
+
+ ablkcipher_request_set_crypt(req, sg, sg, *b_size, iv);
+
+ if (sec)
+ ret = test_acipher_jiffies(req, enc,
+ *b_size, sec);
+ else
+ ret = test_acipher_cycles(req, enc,
+ *b_size);
+
+ if (ret) {
+ pr_err("%s() failed flags=%x\n", e,
+ crypto_ablkcipher_get_flags(tfm));
+ break;
+ }
+ b_size++;
+ i++;
+ } while (*b_size);
+ keysize++;
+ } while (*keysize);
+
+out_free_req:
+ ablkcipher_request_free(req);
+out:
+ crypto_free_ablkcipher(tfm);
+}
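/*
 * Illustrative sketch, not part of the patch: how test_acipher_speed()
 * lays key and data out across the tvmem pages.  The key sits at the
 * start of page 0, the first scatterlist entry covers the remainder of
 * page 0, and any further data spills onto whole pages followed by one
 * final partial page.  Assumes <linux/scatterlist.h> plus the tvmem[]
 * page array and TVMEMSIZE used above.
 */
static void sketch_sg_layout(struct scatterlist *sg, char *pages[],
			     unsigned int keysize, unsigned int b_size)
{
	unsigned int k = keysize + b_size;
	unsigned int j;

	sg_init_table(sg, TVMEMSIZE);

	if (k <= PAGE_SIZE) {
		sg_set_buf(sg, pages[0] + keysize, b_size);
		return;
	}

	sg_set_buf(sg, pages[0] + keysize, PAGE_SIZE - keysize);
	k -= PAGE_SIZE;
	for (j = 1; k > PAGE_SIZE; j++, k -= PAGE_SIZE)
		sg_set_buf(sg + j, pages[j], PAGE_SIZE);
	sg_set_buf(sg + j, pages[j], k);	/* final partial page */
}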
+
static void test_available(void)
{
char **name = check;
@@ -466,237 +1206,349 @@ static void test_available(void)
static inline int tcrypt_test(const char *alg)
{
- return alg_test(alg, alg, 0, 0);
+ int ret;
+
+ ret = alg_test(alg, alg, 0, 0);
+ /* non-fips algs return -EINVAL in fips mode */
+ if (fips_enabled && ret == -EINVAL)
+ ret = 0;
+ return ret;
}
-static void do_test(int m)
+static int do_test(int m)
{
int i;
+ int ret = 0;
switch (m) {
case 0:
for (i = 1; i < 200; i++)
- do_test(i);
+ ret += do_test(i);
break;
case 1:
- tcrypt_test("md5");
+ ret += tcrypt_test("md5");
break;
case 2:
- tcrypt_test("sha1");
+ ret += tcrypt_test("sha1");
break;
case 3:
- tcrypt_test("ecb(des)");
- tcrypt_test("cbc(des)");
+ ret += tcrypt_test("ecb(des)");
+ ret += tcrypt_test("cbc(des)");
+ ret += tcrypt_test("ctr(des)");
break;
case 4:
- tcrypt_test("ecb(des3_ede)");
- tcrypt_test("cbc(des3_ede)");
+ ret += tcrypt_test("ecb(des3_ede)");
+ ret += tcrypt_test("cbc(des3_ede)");
+ ret += tcrypt_test("ctr(des3_ede)");
break;
case 5:
- tcrypt_test("md4");
+ ret += tcrypt_test("md4");
break;
case 6:
- tcrypt_test("sha256");
+ ret += tcrypt_test("sha256");
break;
case 7:
- tcrypt_test("ecb(blowfish)");
- tcrypt_test("cbc(blowfish)");
+ ret += tcrypt_test("ecb(blowfish)");
+ ret += tcrypt_test("cbc(blowfish)");
+ ret += tcrypt_test("ctr(blowfish)");
break;
case 8:
- tcrypt_test("ecb(twofish)");
- tcrypt_test("cbc(twofish)");
+ ret += tcrypt_test("ecb(twofish)");
+ ret += tcrypt_test("cbc(twofish)");
+ ret += tcrypt_test("ctr(twofish)");
+ ret += tcrypt_test("lrw(twofish)");
+ ret += tcrypt_test("xts(twofish)");
break;
case 9:
- tcrypt_test("ecb(serpent)");
+ ret += tcrypt_test("ecb(serpent)");
+ ret += tcrypt_test("cbc(serpent)");
+ ret += tcrypt_test("ctr(serpent)");
+ ret += tcrypt_test("lrw(serpent)");
+ ret += tcrypt_test("xts(serpent)");
break;
case 10:
- tcrypt_test("ecb(aes)");
- tcrypt_test("cbc(aes)");
- tcrypt_test("lrw(aes)");
- tcrypt_test("xts(aes)");
- tcrypt_test("rfc3686(ctr(aes))");
+ ret += tcrypt_test("ecb(aes)");
+ ret += tcrypt_test("cbc(aes)");
+ ret += tcrypt_test("lrw(aes)");
+ ret += tcrypt_test("xts(aes)");
+ ret += tcrypt_test("ctr(aes)");
+ ret += tcrypt_test("rfc3686(ctr(aes))");
break;
case 11:
- tcrypt_test("sha384");
+ ret += tcrypt_test("sha384");
break;
case 12:
- tcrypt_test("sha512");
+ ret += tcrypt_test("sha512");
break;
case 13:
- tcrypt_test("deflate");
+ ret += tcrypt_test("deflate");
break;
case 14:
- tcrypt_test("ecb(cast5)");
+ ret += tcrypt_test("ecb(cast5)");
+ ret += tcrypt_test("cbc(cast5)");
+ ret += tcrypt_test("ctr(cast5)");
break;
case 15:
- tcrypt_test("ecb(cast6)");
+ ret += tcrypt_test("ecb(cast6)");
+ ret += tcrypt_test("cbc(cast6)");
+ ret += tcrypt_test("ctr(cast6)");
+ ret += tcrypt_test("lrw(cast6)");
+ ret += tcrypt_test("xts(cast6)");
break;
case 16:
- tcrypt_test("ecb(arc4)");
+ ret += tcrypt_test("ecb(arc4)");
break;
case 17:
- tcrypt_test("michael_mic");
+ ret += tcrypt_test("michael_mic");
break;
case 18:
- tcrypt_test("crc32c");
+ ret += tcrypt_test("crc32c");
break;
case 19:
- tcrypt_test("ecb(tea)");
+ ret += tcrypt_test("ecb(tea)");
break;
case 20:
- tcrypt_test("ecb(xtea)");
+ ret += tcrypt_test("ecb(xtea)");
break;
case 21:
- tcrypt_test("ecb(khazad)");
+ ret += tcrypt_test("ecb(khazad)");
break;
case 22:
- tcrypt_test("wp512");
+ ret += tcrypt_test("wp512");
break;
case 23:
- tcrypt_test("wp384");
+ ret += tcrypt_test("wp384");
break;
case 24:
- tcrypt_test("wp256");
+ ret += tcrypt_test("wp256");
break;
case 25:
- tcrypt_test("ecb(tnepres)");
+ ret += tcrypt_test("ecb(tnepres)");
break;
case 26:
- tcrypt_test("ecb(anubis)");
- tcrypt_test("cbc(anubis)");
+ ret += tcrypt_test("ecb(anubis)");
+ ret += tcrypt_test("cbc(anubis)");
break;
case 27:
- tcrypt_test("tgr192");
+ ret += tcrypt_test("tgr192");
break;
case 28:
-
- tcrypt_test("tgr160");
+ ret += tcrypt_test("tgr160");
break;
case 29:
- tcrypt_test("tgr128");
+ ret += tcrypt_test("tgr128");
break;
case 30:
- tcrypt_test("ecb(xeta)");
+ ret += tcrypt_test("ecb(xeta)");
break;
case 31:
- tcrypt_test("pcbc(fcrypt)");
+ ret += tcrypt_test("pcbc(fcrypt)");
break;
case 32:
- tcrypt_test("ecb(camellia)");
- tcrypt_test("cbc(camellia)");
+ ret += tcrypt_test("ecb(camellia)");
+ ret += tcrypt_test("cbc(camellia)");
+ ret += tcrypt_test("ctr(camellia)");
+ ret += tcrypt_test("lrw(camellia)");
+ ret += tcrypt_test("xts(camellia)");
break;
+
case 33:
- tcrypt_test("sha224");
+ ret += tcrypt_test("sha224");
break;
case 34:
- tcrypt_test("salsa20");
+ ret += tcrypt_test("salsa20");
break;
case 35:
- tcrypt_test("gcm(aes)");
+ ret += tcrypt_test("gcm(aes)");
break;
case 36:
- tcrypt_test("lzo");
+ ret += tcrypt_test("lzo");
break;
case 37:
- tcrypt_test("ccm(aes)");
+ ret += tcrypt_test("ccm(aes)");
break;
case 38:
- tcrypt_test("cts(cbc(aes))");
+ ret += tcrypt_test("cts(cbc(aes))");
break;
case 39:
- tcrypt_test("rmd128");
+ ret += tcrypt_test("rmd128");
break;
case 40:
- tcrypt_test("rmd160");
+ ret += tcrypt_test("rmd160");
break;
case 41:
- tcrypt_test("rmd256");
+ ret += tcrypt_test("rmd256");
break;
case 42:
- tcrypt_test("rmd320");
+ ret += tcrypt_test("rmd320");
break;
case 43:
- tcrypt_test("ecb(seed)");
+ ret += tcrypt_test("ecb(seed)");
+ break;
+
+ case 44:
+ ret += tcrypt_test("zlib");
+ break;
+
+ case 45:
+ ret += tcrypt_test("rfc4309(ccm(aes))");
+ break;
+
+ case 46:
+ ret += tcrypt_test("ghash");
+ break;
+
+ case 47:
+ ret += tcrypt_test("crct10dif");
break;
case 100:
- tcrypt_test("hmac(md5)");
+ ret += tcrypt_test("hmac(md5)");
break;
case 101:
- tcrypt_test("hmac(sha1)");
+ ret += tcrypt_test("hmac(sha1)");
break;
case 102:
- tcrypt_test("hmac(sha256)");
+ ret += tcrypt_test("hmac(sha256)");
break;
case 103:
- tcrypt_test("hmac(sha384)");
+ ret += tcrypt_test("hmac(sha384)");
break;
case 104:
- tcrypt_test("hmac(sha512)");
+ ret += tcrypt_test("hmac(sha512)");
break;
case 105:
- tcrypt_test("hmac(sha224)");
+ ret += tcrypt_test("hmac(sha224)");
break;
case 106:
- tcrypt_test("xcbc(aes)");
+ ret += tcrypt_test("xcbc(aes)");
break;
case 107:
- tcrypt_test("hmac(rmd128)");
+ ret += tcrypt_test("hmac(rmd128)");
break;
case 108:
- tcrypt_test("hmac(rmd160)");
+ ret += tcrypt_test("hmac(rmd160)");
+ break;
+
+ case 109:
+ ret += tcrypt_test("vmac(aes)");
+ break;
+
+ case 110:
+ ret += tcrypt_test("hmac(crc32)");
+ break;
+
+ case 150:
+ ret += tcrypt_test("ansi_cprng");
+ break;
+
+ case 151:
+ ret += tcrypt_test("rfc4106(gcm(aes))");
+ break;
+
+ case 152:
+ ret += tcrypt_test("rfc4543(gcm(aes))");
+ break;
+
+ case 153:
+ ret += tcrypt_test("cmac(aes)");
break;
+ case 154:
+ ret += tcrypt_test("cmac(des3_ede)");
+ break;
+
+ case 155:
+ ret += tcrypt_test("authenc(hmac(sha1),cbc(aes))");
+ break;
+
+ case 156:
+ ret += tcrypt_test("authenc(hmac(md5),ecb(cipher_null))");
+ break;
+
+ case 157:
+ ret += tcrypt_test("authenc(hmac(sha1),ecb(cipher_null))");
+ break;
+ case 181:
+ ret += tcrypt_test("authenc(hmac(sha1),cbc(des))");
+ break;
+ case 182:
+ ret += tcrypt_test("authenc(hmac(sha1),cbc(des3_ede))");
+ break;
+ case 183:
+ ret += tcrypt_test("authenc(hmac(sha224),cbc(des))");
+ break;
+ case 184:
+ ret += tcrypt_test("authenc(hmac(sha224),cbc(des3_ede))");
+ break;
+ case 185:
+ ret += tcrypt_test("authenc(hmac(sha256),cbc(des))");
+ break;
+ case 186:
+ ret += tcrypt_test("authenc(hmac(sha256),cbc(des3_ede))");
+ break;
+ case 187:
+ ret += tcrypt_test("authenc(hmac(sha384),cbc(des))");
+ break;
+ case 188:
+ ret += tcrypt_test("authenc(hmac(sha384),cbc(des3_ede))");
+ break;
+ case 189:
+ ret += tcrypt_test("authenc(hmac(sha512),cbc(des))");
+ break;
+ case 190:
+ ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
+ break;
case 200:
test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
speed_template_16_24_32);
@@ -714,6 +1566,10 @@ static void do_test(int m)
speed_template_32_48_64);
test_cipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
speed_template_32_48_64);
+ test_cipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_cipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
break;
case 201:
@@ -740,6 +1596,18 @@ static void do_test(int m)
speed_template_16_24_32);
test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
speed_template_16_24_32);
+ test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_cipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_40_48);
+ test_cipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
+ speed_template_32_40_48);
+ test_cipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_48_64);
+ test_cipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
+ speed_template_32_48_64);
break;
case 203:
@@ -751,6 +1619,10 @@ static void do_test(int m)
speed_template_8_32);
test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
speed_template_8_32);
+ test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
+ speed_template_8_32);
+ test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
+ speed_template_8_32);
break;
case 204:
@@ -773,6 +1645,18 @@ static void do_test(int m)
speed_template_16_24_32);
test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
speed_template_16_24_32);
+ test_cipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_cipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_cipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_40_48);
+ test_cipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
+ speed_template_32_40_48);
+ test_cipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_48_64);
+ test_cipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
+ speed_template_32_48_64);
break;
case 206:
@@ -780,6 +1664,77 @@ static void do_test(int m)
speed_template_16_32);
break;
+ case 207:
+ test_cipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_cipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_cipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_cipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_cipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_cipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_cipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_48);
+ test_cipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
+ speed_template_32_48);
+ test_cipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_64);
+ test_cipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
+ speed_template_32_64);
+ break;
+
+ case 208:
+ test_cipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
+ speed_template_8);
+ break;
+
+ case 209:
+ test_cipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
+ speed_template_8_16);
+ test_cipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
+ speed_template_8_16);
+ test_cipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
+ speed_template_8_16);
+ test_cipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
+ speed_template_8_16);
+ test_cipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
+ speed_template_8_16);
+ test_cipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
+ speed_template_8_16);
+ break;
+
+ case 210:
+ test_cipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_cipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_cipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_cipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_cipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_cipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_cipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_48);
+ test_cipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
+ speed_template_32_48);
+ test_cipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_64);
+ test_cipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
+ speed_template_32_64);
+ break;
+
+ case 211:
+ test_aead_speed("rfc4106(gcm(aes))", ENCRYPT, sec,
+ NULL, 0, 16, 8, aead_speed_template_20);
+ break;
+
case 300:
/* fall through */
@@ -851,13 +1806,315 @@ static void do_test(int m)
test_hash_speed("rmd320", sec, generic_hash_speed_template);
if (mode > 300 && mode < 400) break;
+ case 318:
+ test_hash_speed("ghash-generic", sec, hash_speed_template_16);
+ if (mode > 300 && mode < 400) break;
+
+ case 319:
+ test_hash_speed("crc32c", sec, generic_hash_speed_template);
+ if (mode > 300 && mode < 400) break;
+
+ case 320:
+ test_hash_speed("crct10dif", sec, generic_hash_speed_template);
+ if (mode > 300 && mode < 400) break;
+
case 399:
break;
+ case 400:
+ /* fall through */
+
+ case 401:
+ test_ahash_speed("md4", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 402:
+ test_ahash_speed("md5", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 403:
+ test_ahash_speed("sha1", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 404:
+ test_ahash_speed("sha256", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 405:
+ test_ahash_speed("sha384", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 406:
+ test_ahash_speed("sha512", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 407:
+ test_ahash_speed("wp256", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 408:
+ test_ahash_speed("wp384", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 409:
+ test_ahash_speed("wp512", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 410:
+ test_ahash_speed("tgr128", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 411:
+ test_ahash_speed("tgr160", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 412:
+ test_ahash_speed("tgr192", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 413:
+ test_ahash_speed("sha224", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 414:
+ test_ahash_speed("rmd128", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 415:
+ test_ahash_speed("rmd160", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 416:
+ test_ahash_speed("rmd256", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 417:
+ test_ahash_speed("rmd320", sec, generic_hash_speed_template);
+ if (mode > 400 && mode < 500) break;
+
+ case 499:
+ break;
+
+ case 500:
+ test_acipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("lrw(aes)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_40_48);
+ test_acipher_speed("lrw(aes)", DECRYPT, sec, NULL, 0,
+ speed_template_32_40_48);
+ test_acipher_speed("xts(aes)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_48_64);
+ test_acipher_speed("xts(aes)", DECRYPT, sec, NULL, 0,
+ speed_template_32_48_64);
+ test_acipher_speed("ctr(aes)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("ctr(aes)", DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("cfb(aes)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("cfb(aes)", DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("ofb(aes)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("ofb(aes)", DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("rfc3686(ctr(aes))", ENCRYPT, sec, NULL, 0,
+ speed_template_20_28_36);
+ test_acipher_speed("rfc3686(ctr(aes))", DECRYPT, sec, NULL, 0,
+ speed_template_20_28_36);
+ break;
+
+ case 501:
+ test_acipher_speed("ecb(des3_ede)", ENCRYPT, sec,
+ des3_speed_template, DES3_SPEED_VECTORS,
+ speed_template_24);
+ test_acipher_speed("ecb(des3_ede)", DECRYPT, sec,
+ des3_speed_template, DES3_SPEED_VECTORS,
+ speed_template_24);
+ test_acipher_speed("cbc(des3_ede)", ENCRYPT, sec,
+ des3_speed_template, DES3_SPEED_VECTORS,
+ speed_template_24);
+ test_acipher_speed("cbc(des3_ede)", DECRYPT, sec,
+ des3_speed_template, DES3_SPEED_VECTORS,
+ speed_template_24);
+ test_acipher_speed("cfb(des3_ede)", ENCRYPT, sec,
+ des3_speed_template, DES3_SPEED_VECTORS,
+ speed_template_24);
+ test_acipher_speed("cfb(des3_ede)", DECRYPT, sec,
+ des3_speed_template, DES3_SPEED_VECTORS,
+ speed_template_24);
+ test_acipher_speed("ofb(des3_ede)", ENCRYPT, sec,
+ des3_speed_template, DES3_SPEED_VECTORS,
+ speed_template_24);
+ test_acipher_speed("ofb(des3_ede)", DECRYPT, sec,
+ des3_speed_template, DES3_SPEED_VECTORS,
+ speed_template_24);
+ break;
+
+ case 502:
+ test_acipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
+ speed_template_8);
+ test_acipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
+ speed_template_8);
+ test_acipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
+ speed_template_8);
+ test_acipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
+ speed_template_8);
+ test_acipher_speed("cfb(des)", ENCRYPT, sec, NULL, 0,
+ speed_template_8);
+ test_acipher_speed("cfb(des)", DECRYPT, sec, NULL, 0,
+ speed_template_8);
+ test_acipher_speed("ofb(des)", ENCRYPT, sec, NULL, 0,
+ speed_template_8);
+ test_acipher_speed("ofb(des)", DECRYPT, sec, NULL, 0,
+ speed_template_8);
+ break;
+
+ case 503:
+ test_acipher_speed("ecb(serpent)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("ecb(serpent)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("cbc(serpent)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("cbc(serpent)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("ctr(serpent)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("ctr(serpent)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("lrw(serpent)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_48);
+ test_acipher_speed("lrw(serpent)", DECRYPT, sec, NULL, 0,
+ speed_template_32_48);
+ test_acipher_speed("xts(serpent)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_64);
+ test_acipher_speed("xts(serpent)", DECRYPT, sec, NULL, 0,
+ speed_template_32_64);
+ break;
+
+ case 504:
+ test_acipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_acipher_speed("lrw(twofish)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_40_48);
+ test_acipher_speed("lrw(twofish)", DECRYPT, sec, NULL, 0,
+ speed_template_32_40_48);
+ test_acipher_speed("xts(twofish)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_48_64);
+ test_acipher_speed("xts(twofish)", DECRYPT, sec, NULL, 0,
+ speed_template_32_48_64);
+ break;
+
+ case 505:
+ test_acipher_speed("ecb(arc4)", ENCRYPT, sec, NULL, 0,
+ speed_template_8);
+ break;
+
+ case 506:
+ test_acipher_speed("ecb(cast5)", ENCRYPT, sec, NULL, 0,
+ speed_template_8_16);
+ test_acipher_speed("ecb(cast5)", DECRYPT, sec, NULL, 0,
+ speed_template_8_16);
+ test_acipher_speed("cbc(cast5)", ENCRYPT, sec, NULL, 0,
+ speed_template_8_16);
+ test_acipher_speed("cbc(cast5)", DECRYPT, sec, NULL, 0,
+ speed_template_8_16);
+ test_acipher_speed("ctr(cast5)", ENCRYPT, sec, NULL, 0,
+ speed_template_8_16);
+ test_acipher_speed("ctr(cast5)", DECRYPT, sec, NULL, 0,
+ speed_template_8_16);
+ break;
+
+ case 507:
+ test_acipher_speed("ecb(cast6)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("ecb(cast6)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("cbc(cast6)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("cbc(cast6)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("ctr(cast6)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("ctr(cast6)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("lrw(cast6)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_48);
+ test_acipher_speed("lrw(cast6)", DECRYPT, sec, NULL, 0,
+ speed_template_32_48);
+ test_acipher_speed("xts(cast6)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_64);
+ test_acipher_speed("xts(cast6)", DECRYPT, sec, NULL, 0,
+ speed_template_32_64);
+ break;
+
+ case 508:
+ test_acipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("ctr(camellia)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("ctr(camellia)", DECRYPT, sec, NULL, 0,
+ speed_template_16_32);
+ test_acipher_speed("lrw(camellia)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_48);
+ test_acipher_speed("lrw(camellia)", DECRYPT, sec, NULL, 0,
+ speed_template_32_48);
+ test_acipher_speed("xts(camellia)", ENCRYPT, sec, NULL, 0,
+ speed_template_32_64);
+ test_acipher_speed("xts(camellia)", DECRYPT, sec, NULL, 0,
+ speed_template_32_64);
+ break;
+
+ case 509:
+ test_acipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
+ speed_template_8_32);
+ test_acipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
+ speed_template_8_32);
+ test_acipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
+ speed_template_8_32);
+ test_acipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
+ speed_template_8_32);
+ test_acipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
+ speed_template_8_32);
+ test_acipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
+ speed_template_8_32);
+ break;
+
case 1000:
test_available();
break;
}
+
+ return ret;
+}
+
+static int do_alg_test(const char *alg, u32 type, u32 mask)
+{
+ return crypto_has_alg(alg, type, mask ?: CRYPTO_ALG_TYPE_MASK) ?
+ 0 : -ENOENT;
}
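/*
 * Note, not part of the patch: "mask ?: CRYPTO_ALG_TYPE_MASK" uses the
 * GNU "a ?: b" shorthand for "a ? a : b", i.e. the caller-supplied mask
 * is used when non-zero and CRYPTO_ALG_TYPE_MASK otherwise.  Spelled
 * out (assumes <linux/crypto.h>):
 */
static int do_alg_test_expanded(const char *alg, u32 type, u32 mask)
{
	u32 effective_mask = mask ? mask : CRYPTO_ALG_TYPE_MASK;

	if (crypto_has_alg(alg, type, effective_mask))
		return 0;		/* a matching algorithm exists */

	return -ENOENT;
}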
static int __init tcrypt_mod_init(void)
@@ -871,15 +2128,25 @@ static int __init tcrypt_mod_init(void)
goto err_free_tv;
}
- do_test(mode);
+ if (alg)
+ err = do_alg_test(alg, type, mask);
+ else
+ err = do_test(mode);
+
+ if (err) {
+ printk(KERN_ERR "tcrypt: one or more tests failed!\n");
+ goto err_free_tv;
+ }
- /* We intentionaly return -EAGAIN to prevent keeping
- * the module. It does all its work from init()
- * and doesn't offer any runtime functionality
+	/* We intentionally return -EAGAIN to prevent keeping the module,
+ * unless we're running in fips mode. It does all its work from
+ * init() and doesn't offer any runtime functionality, but in
+ * the fips case, checking for a successful load is helpful.
* => we don't need it in the memory, do we?
* -- mludvig
*/
- err = -EAGAIN;
+ if (!fips_enabled)
+ err = -EAGAIN;
err_free_tv:
for (i = 0; i < TVMEMSIZE && tvmem[i]; i++)
@@ -897,6 +2164,9 @@ static void __exit tcrypt_mod_fini(void) { }
module_init(tcrypt_mod_init);
module_exit(tcrypt_mod_fini);
+module_param(alg, charp, 0);
+module_param(type, uint, 0);
+module_param(mask, uint, 0);
module_param(mode, int, 0);
module_param(sec, uint, 0);
MODULE_PARM_DESC(sec, "Length in seconds of speed tests "
diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h
index 966bbfaf95b..6c7e21a09f7 100644
--- a/crypto/tcrypt.h
+++ b/crypto/tcrypt.h
@@ -22,9 +22,15 @@ struct cipher_speed_template {
unsigned int klen;
};
+struct aead_speed_template {
+ const char *key;
+ unsigned int klen;
+};
+
struct hash_speed {
unsigned int blen; /* buffer length */
unsigned int plen; /* per-update length */
+ unsigned int klen; /* key length */
};
/*
@@ -46,11 +52,20 @@ static struct cipher_speed_template des3_speed_template[] = {
*/
static u8 speed_template_8[] = {8, 0};
static u8 speed_template_24[] = {24, 0};
+static u8 speed_template_8_16[] = {8, 16, 0};
static u8 speed_template_8_32[] = {8, 32, 0};
static u8 speed_template_16_32[] = {16, 32, 0};
static u8 speed_template_16_24_32[] = {16, 24, 32, 0};
+static u8 speed_template_20_28_36[] = {20, 28, 36, 0};
static u8 speed_template_32_40_48[] = {32, 40, 48, 0};
+static u8 speed_template_32_48[] = {32, 48, 0};
static u8 speed_template_32_48_64[] = {32, 48, 64, 0};
+static u8 speed_template_32_64[] = {32, 64, 0};
+
+/*
+ * AEAD speed tests
+ */
+static u8 aead_speed_template_20[] = {20, 0};
/*
* Digest speed tests
@@ -83,4 +98,32 @@ static struct hash_speed generic_hash_speed_template[] = {
{ .blen = 0, .plen = 0, }
};
+static struct hash_speed hash_speed_template_16[] = {
+ { .blen = 16, .plen = 16, .klen = 16, },
+ { .blen = 64, .plen = 16, .klen = 16, },
+ { .blen = 64, .plen = 64, .klen = 16, },
+ { .blen = 256, .plen = 16, .klen = 16, },
+ { .blen = 256, .plen = 64, .klen = 16, },
+ { .blen = 256, .plen = 256, .klen = 16, },
+ { .blen = 1024, .plen = 16, .klen = 16, },
+ { .blen = 1024, .plen = 256, .klen = 16, },
+ { .blen = 1024, .plen = 1024, .klen = 16, },
+ { .blen = 2048, .plen = 16, .klen = 16, },
+ { .blen = 2048, .plen = 256, .klen = 16, },
+ { .blen = 2048, .plen = 1024, .klen = 16, },
+ { .blen = 2048, .plen = 2048, .klen = 16, },
+ { .blen = 4096, .plen = 16, .klen = 16, },
+ { .blen = 4096, .plen = 256, .klen = 16, },
+ { .blen = 4096, .plen = 1024, .klen = 16, },
+ { .blen = 4096, .plen = 4096, .klen = 16, },
+ { .blen = 8192, .plen = 16, .klen = 16, },
+ { .blen = 8192, .plen = 256, .klen = 16, },
+ { .blen = 8192, .plen = 1024, .klen = 16, },
+ { .blen = 8192, .plen = 4096, .klen = 16, },
+ { .blen = 8192, .plen = 8192, .klen = 16, },
+
+ /* End marker */
+ { .blen = 0, .plen = 0, .klen = 0, }
+};
+
#endif /* _CRYPTO_TCRYPT_H */
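/*
 * Illustrative note, not part of the patch: each speed_template_* array
 * is a zero-terminated list of key lengths in bytes, so
 * aead_speed_template_20 = {20, 0} asks for a single 20-byte key --
 * which for "rfc4106(gcm(aes))" is assumed to be a 16-byte AES key plus
 * the 4-byte nonce salt that the RFC 4106 wrapper consumes in setkey().
 * The new .klen field in struct hash_speed likewise lets keyed hashes
 * such as ghash receive a key via crypto_hash_setkey() before each run.
 * A hypothetical keyed template following the same convention:
 */
static struct hash_speed example_keyed_hash_template[] = {
	{ .blen = 1024, .plen = 1024, .klen = 16, },	/* one 1 KiB block, 16-byte key */

	/* End marker */
	{ .blen = 0, .plen = 0, .klen = 0, }
};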
diff --git a/crypto/tea.c b/crypto/tea.c
index 412bc74f817..0a572323ee4 100644
--- a/crypto/tea.c
+++ b/crypto/tea.c
@@ -219,84 +219,55 @@ static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
out[1] = cpu_to_le32(z);
}
-static struct crypto_alg tea_alg = {
+static struct crypto_alg tea_algs[3] = { {
.cra_name = "tea",
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
.cra_blocksize = TEA_BLOCK_SIZE,
.cra_ctxsize = sizeof (struct tea_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(tea_alg.cra_list),
.cra_u = { .cipher = {
.cia_min_keysize = TEA_KEY_SIZE,
.cia_max_keysize = TEA_KEY_SIZE,
.cia_setkey = tea_setkey,
.cia_encrypt = tea_encrypt,
.cia_decrypt = tea_decrypt } }
-};
-
-static struct crypto_alg xtea_alg = {
+}, {
.cra_name = "xtea",
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
.cra_blocksize = XTEA_BLOCK_SIZE,
.cra_ctxsize = sizeof (struct xtea_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list),
.cra_u = { .cipher = {
.cia_min_keysize = XTEA_KEY_SIZE,
.cia_max_keysize = XTEA_KEY_SIZE,
.cia_setkey = xtea_setkey,
.cia_encrypt = xtea_encrypt,
.cia_decrypt = xtea_decrypt } }
-};
-
-static struct crypto_alg xeta_alg = {
+}, {
.cra_name = "xeta",
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
.cra_blocksize = XTEA_BLOCK_SIZE,
.cra_ctxsize = sizeof (struct xtea_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list),
.cra_u = { .cipher = {
.cia_min_keysize = XTEA_KEY_SIZE,
.cia_max_keysize = XTEA_KEY_SIZE,
.cia_setkey = xtea_setkey,
.cia_encrypt = xeta_encrypt,
.cia_decrypt = xeta_decrypt } }
-};
+} };
static int __init tea_mod_init(void)
{
- int ret = 0;
-
- ret = crypto_register_alg(&tea_alg);
- if (ret < 0)
- goto out;
-
- ret = crypto_register_alg(&xtea_alg);
- if (ret < 0) {
- crypto_unregister_alg(&tea_alg);
- goto out;
- }
-
- ret = crypto_register_alg(&xeta_alg);
- if (ret < 0) {
- crypto_unregister_alg(&tea_alg);
- crypto_unregister_alg(&xtea_alg);
- goto out;
- }
-
-out:
- return ret;
+ return crypto_register_algs(tea_algs, ARRAY_SIZE(tea_algs));
}
static void __exit tea_mod_fini(void)
{
- crypto_unregister_alg(&tea_alg);
- crypto_unregister_alg(&xtea_alg);
- crypto_unregister_alg(&xeta_alg);
+ crypto_unregister_algs(tea_algs, ARRAY_SIZE(tea_algs));
}
MODULE_ALIAS("xtea");
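/*
 * Illustrative sketch, not part of the patch: crypto_register_algs()
 * replaces the hand-rolled unwind code removed above.  Its behaviour is
 * roughly equivalent to registering each entry in turn and
 * unregistering the already-registered ones on the first failure, as
 * sketched below (assumes <linux/crypto.h>).
 */
static int register_algs_sketch(struct crypto_alg *algs, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_alg(&algs[i]);
		if (err)
			goto unwind;
	}
	return 0;

unwind:
	while (--i >= 0)
		crypto_unregister_alg(&algs[i]);
	return err;
}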
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index a75f11ffb95..498649ac195 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -6,6 +6,13 @@
* Copyright (c) 2007 Nokia Siemens Networks
* Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
*
+ * Updated RFC4106 AES-GCM testing.
+ * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
+ * Adrian Hoban <adrian.hoban@intel.com>
+ * Gabriele Paoloni <gabriele.paoloni@intel.com>
+ * Tadeusz Struk (tadeusz.struk@intel.com)
+ * Copyright (c) 2010, Intel Corporation.
+ *
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
@@ -19,8 +26,20 @@
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
+#include <crypto/rng.h>
#include "internal.h"
+
+#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
+
+/* a perfect nop */
+int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
+{
+ return 0;
+}
+
+#else
+
#include "testmgr.h"
/*
@@ -72,29 +91,41 @@ struct comp_test_suite {
} comp, decomp;
};
+struct pcomp_test_suite {
+ struct {
+ struct pcomp_testvec *vecs;
+ unsigned int count;
+ } comp, decomp;
+};
+
struct hash_test_suite {
struct hash_testvec *vecs;
unsigned int count;
};
+struct cprng_test_suite {
+ struct cprng_testvec *vecs;
+ unsigned int count;
+};
+
struct alg_test_desc {
const char *alg;
int (*test)(const struct alg_test_desc *desc, const char *driver,
u32 type, u32 mask);
+ int fips_allowed; /* set if alg is allowed in fips mode */
union {
struct aead_test_suite aead;
struct cipher_test_suite cipher;
struct comp_test_suite comp;
+ struct pcomp_test_suite pcomp;
struct hash_test_suite hash;
+ struct cprng_test_suite cprng;
} suite;
};
static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
-static char *xbuf[XBUFSIZE];
-static char *axbuf[XBUFSIZE];
-
static void hexdump(unsigned char *buf, unsigned int len)
{
print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
@@ -113,8 +144,49 @@ static void tcrypt_complete(struct crypto_async_request *req, int err)
complete(&res->completion);
}
-static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
- unsigned int tcount)
+static int testmgr_alloc_buf(char *buf[XBUFSIZE])
+{
+ int i;
+
+ for (i = 0; i < XBUFSIZE; i++) {
+ buf[i] = (void *)__get_free_page(GFP_KERNEL);
+ if (!buf[i])
+ goto err_free_buf;
+ }
+
+ return 0;
+
+err_free_buf:
+ while (i-- > 0)
+ free_page((unsigned long)buf[i]);
+
+ return -ENOMEM;
+}
+
+static void testmgr_free_buf(char *buf[XBUFSIZE])
+{
+ int i;
+
+ for (i = 0; i < XBUFSIZE; i++)
+ free_page((unsigned long)buf[i]);
+}
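/*
 * Illustrative usage sketch, not part of the patch: each test routine
 * now allocates its own XBUFSIZE-page scratch pool instead of sharing
 * the former global xbuf/axbuf arrays, so concurrent alg_test() callers
 * no longer write into each other's buffers.
 */
static int sketch_with_scratch_pages(void)
{
	char *xbuf[XBUFSIZE];
	int ret;

	ret = testmgr_alloc_buf(xbuf);	/* one page per slot, or -ENOMEM */
	if (ret)
		return ret;

	/* ... copy test vectors into xbuf[] and run them here ... */

	testmgr_free_buf(xbuf);
	return 0;
}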
+
+static int do_one_async_hash_op(struct ahash_request *req,
+ struct tcrypt_result *tr,
+ int ret)
+{
+ if (ret == -EINPROGRESS || ret == -EBUSY) {
+ ret = wait_for_completion_interruptible(&tr->completion);
+ if (!ret)
+ ret = tr->err;
+ reinit_completion(&tr->completion);
+ }
+ return ret;
+}
+
+static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
+ unsigned int tcount, bool use_digest,
+ const int align_offset)
{
const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
unsigned int i, j, k, temp;
@@ -122,8 +194,12 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
char result[64];
struct ahash_request *req;
struct tcrypt_result tresult;
- int ret;
void *hash_buff;
+ char *xbuf[XBUFSIZE];
+ int ret = -ENOMEM;
+
+ if (testmgr_alloc_buf(xbuf))
+ goto out_nobuf;
init_completion(&tresult.completion);
@@ -131,16 +207,25 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
if (!req) {
printk(KERN_ERR "alg: hash: Failed to allocate request for "
"%s\n", algo);
- ret = -ENOMEM;
goto out_noreq;
}
ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
tcrypt_complete, &tresult);
+ j = 0;
for (i = 0; i < tcount; i++) {
+ if (template[i].np)
+ continue;
+
+ ret = -EINVAL;
+ if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
+ goto out;
+
+ j++;
memset(result, 0, 64);
hash_buff = xbuf[0];
+ hash_buff += align_offset;
memcpy(hash_buff, template[i].plaintext, template[i].psize);
sg_init_one(&sg[0], hash_buff, template[i].psize);
@@ -151,36 +236,49 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
template[i].ksize);
if (ret) {
printk(KERN_ERR "alg: hash: setkey failed on "
- "test %d for %s: ret=%d\n", i + 1, algo,
+ "test %d for %s: ret=%d\n", j, algo,
-ret);
goto out;
}
}
ahash_request_set_crypt(req, sg, result, template[i].psize);
- ret = crypto_ahash_digest(req);
- switch (ret) {
- case 0:
- break;
- case -EINPROGRESS:
- case -EBUSY:
- ret = wait_for_completion_interruptible(
- &tresult.completion);
- if (!ret && !(ret = tresult.err)) {
- INIT_COMPLETION(tresult.completion);
- break;
+ if (use_digest) {
+ ret = do_one_async_hash_op(req, &tresult,
+ crypto_ahash_digest(req));
+ if (ret) {
+ pr_err("alg: hash: digest failed on test %d "
+ "for %s: ret=%d\n", j, algo, -ret);
+ goto out;
+ }
+ } else {
+ ret = do_one_async_hash_op(req, &tresult,
+ crypto_ahash_init(req));
+ if (ret) {
+				pr_err("alg: hash: init failed on test %d "
+ "for %s: ret=%d\n", j, algo, -ret);
+ goto out;
+ }
+ ret = do_one_async_hash_op(req, &tresult,
+ crypto_ahash_update(req));
+ if (ret) {
+				pr_err("alg: hash: update failed on test %d "
+ "for %s: ret=%d\n", j, algo, -ret);
+ goto out;
+ }
+ ret = do_one_async_hash_op(req, &tresult,
+ crypto_ahash_final(req));
+ if (ret) {
+				pr_err("alg: hash: final failed on test %d "
+ "for %s: ret=%d\n", j, algo, -ret);
+ goto out;
}
- /* fall through */
- default:
- printk(KERN_ERR "alg: hash: digest failed on test %d "
- "for %s: ret=%d\n", i + 1, algo, -ret);
- goto out;
}
if (memcmp(result, template[i].digest,
crypto_ahash_digestsize(tfm))) {
printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
- i + 1, algo);
+ j, algo);
hexdump(result, crypto_ahash_digestsize(tfm));
ret = -EINVAL;
goto out;
@@ -189,13 +287,21 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
j = 0;
for (i = 0; i < tcount; i++) {
+		/* alignment tests are only done with contiguous buffers */
+ if (align_offset != 0)
+ break;
+
if (template[i].np) {
j++;
memset(result, 0, 64);
temp = 0;
sg_init_table(sg, template[i].np);
+ ret = -EINVAL;
for (k = 0; k < template[i].np; k++) {
+ if (WARN_ON(offset_in_page(IDX[k]) +
+ template[i].tap[k] > PAGE_SIZE))
+ goto out;
sg_set_buf(&sg[k],
memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
offset_in_page(IDX[k]),
@@ -230,7 +336,7 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
ret = wait_for_completion_interruptible(
&tresult.completion);
if (!ret && !(ret = tresult.err)) {
- INIT_COMPLETION(tresult.completion);
+ reinit_completion(&tresult.completion);
break;
}
/* fall through */
@@ -257,26 +363,83 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
out:
ahash_request_free(req);
out_noreq:
+ testmgr_free_buf(xbuf);
+out_nobuf:
return ret;
}
-static int test_aead(struct crypto_aead *tfm, int enc,
- struct aead_testvec *template, unsigned int tcount)
+static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
+ unsigned int tcount, bool use_digest)
+{
+ unsigned int alignmask;
+ int ret;
+
+ ret = __test_hash(tfm, template, tcount, use_digest, 0);
+ if (ret)
+ return ret;
+
+ /* test unaligned buffers, check with one byte offset */
+ ret = __test_hash(tfm, template, tcount, use_digest, 1);
+ if (ret)
+ return ret;
+
+ alignmask = crypto_tfm_alg_alignmask(&tfm->base);
+ if (alignmask) {
+ /* Check if alignment mask for tfm is correctly set. */
+ ret = __test_hash(tfm, template, tcount, use_digest,
+ alignmask + 1);
+ if (ret)
+ return ret;
+ }
+
+ return 0;
+}
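/*
 * Illustrative sketch, not part of the patch: test_hash() now runs the
 * same vectors at three buffer offsets -- 0 for the aligned baseline,
 * 1 for a deliberately misaligned buffer, and alignmask + 1 to confirm
 * that the alignment mask the transform declares is really sufficient.
 * For an alignmask of 15 the offsets exercised are 0, 1 and 16.
 */
static int sketch_alignment_offsets(struct crypto_ahash *tfm,
				    unsigned int offsets[3])
{
	unsigned int alignmask = crypto_tfm_alg_alignmask(&tfm->base);

	offsets[0] = 0;			/* aligned baseline */
	offsets[1] = 1;			/* deliberately misaligned */
	offsets[2] = alignmask + 1;	/* exactly the declared alignment */

	return alignmask ? 3 : 2;	/* third pass only when a mask is set */
}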
+
+static int __test_aead(struct crypto_aead *tfm, int enc,
+ struct aead_testvec *template, unsigned int tcount,
+ const bool diff_dst, const int align_offset)
{
const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
unsigned int i, j, k, n, temp;
- int ret = 0;
+ int ret = -ENOMEM;
char *q;
char *key;
struct aead_request *req;
- struct scatterlist sg[8];
- struct scatterlist asg[8];
- const char *e;
+ struct scatterlist *sg;
+ struct scatterlist *asg;
+ struct scatterlist *sgout;
+ const char *e, *d;
struct tcrypt_result result;
unsigned int authsize;
void *input;
+ void *output;
void *assoc;
- char iv[MAX_IVLEN];
+ char *iv;
+ char *xbuf[XBUFSIZE];
+ char *xoutbuf[XBUFSIZE];
+ char *axbuf[XBUFSIZE];
+
+ iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
+ if (!iv)
+ return ret;
+ if (testmgr_alloc_buf(xbuf))
+ goto out_noxbuf;
+ if (testmgr_alloc_buf(axbuf))
+ goto out_noaxbuf;
+ if (diff_dst && testmgr_alloc_buf(xoutbuf))
+ goto out_nooutbuf;
+
+ /* avoid "the frame size is larger than 1024 bytes" compiler warning */
+ sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL);
+ if (!sg)
+ goto out_nosg;
+ asg = &sg[8];
+ sgout = &asg[8];
+
+ if (diff_dst)
+ d = "-ddst";
+ else
+ d = "";
if (enc == ENCRYPT)
e = "encryption";
@@ -287,9 +450,8 @@ static int test_aead(struct crypto_aead *tfm, int enc,
req = aead_request_alloc(tfm, GFP_KERNEL);
if (!req) {
- printk(KERN_ERR "alg: aead: Failed to allocate request for "
- "%s\n", algo);
- ret = -ENOMEM;
+ pr_err("alg: aead%s: Failed to allocate request for %s\n",
+ d, algo);
goto out;
}
@@ -300,12 +462,18 @@ static int test_aead(struct crypto_aead *tfm, int enc,
if (!template[i].np) {
j++;
- /* some tepmplates have no input data but they will
+ /* some templates have no input data but they will
* touch input
*/
input = xbuf[0];
+ input += align_offset;
assoc = axbuf[0];
+ ret = -EINVAL;
+ if (WARN_ON(align_offset + template[i].ilen >
+ PAGE_SIZE || template[i].alen > PAGE_SIZE))
+ goto out;
+
memcpy(input, template[i].input, template[i].ilen);
memcpy(assoc, template[i].assoc, template[i].alen);
if (template[i].iv)
@@ -323,9 +491,8 @@ static int test_aead(struct crypto_aead *tfm, int enc,
ret = crypto_aead_setkey(tfm, key,
template[i].klen);
if (!ret == template[i].fail) {
- printk(KERN_ERR "alg: aead: setkey failed on "
- "test %d for %s: flags=%x\n", j, algo,
- crypto_aead_get_flags(tfm));
+ pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
+ d, j, algo, crypto_aead_get_flags(tfm));
goto out;
} else if (ret)
continue;
@@ -333,18 +500,27 @@ static int test_aead(struct crypto_aead *tfm, int enc,
authsize = abs(template[i].rlen - template[i].ilen);
ret = crypto_aead_setauthsize(tfm, authsize);
if (ret) {
- printk(KERN_ERR "alg: aead: Failed to set "
- "authsize to %u on test %d for %s\n",
- authsize, j, algo);
+ pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
+ d, authsize, j, algo);
goto out;
}
- sg_init_one(&sg[0], input,
- template[i].ilen + (enc ? authsize : 0));
+ if (diff_dst) {
+ output = xoutbuf[0];
+ output += align_offset;
+ sg_init_one(&sg[0], input, template[i].ilen);
+ sg_init_one(&sgout[0], output,
+ template[i].rlen);
+ } else {
+ sg_init_one(&sg[0], input,
+ template[i].ilen +
+ (enc ? authsize : 0));
+ output = input;
+ }
sg_init_one(&asg[0], assoc, template[i].alen);
- aead_request_set_crypt(req, sg, sg,
+ aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
template[i].ilen, iv);
aead_request_set_assoc(req, asg, template[i].alen);
@@ -355,26 +531,38 @@ static int test_aead(struct crypto_aead *tfm, int enc,
switch (ret) {
case 0:
+ if (template[i].novrfy) {
+ /* verification was supposed to fail */
+ pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
+ d, e, j, algo);
+ /* so really, we got a bad message */
+ ret = -EBADMSG;
+ goto out;
+ }
break;
case -EINPROGRESS:
case -EBUSY:
ret = wait_for_completion_interruptible(
&result.completion);
if (!ret && !(ret = result.err)) {
- INIT_COMPLETION(result.completion);
+ reinit_completion(&result.completion);
break;
}
+ case -EBADMSG:
+ if (template[i].novrfy)
+ /* verification failure was expected */
+ continue;
/* fall through */
default:
- printk(KERN_ERR "alg: aead: %s failed on test "
- "%d for %s: ret=%d\n", e, j, algo, -ret);
+ pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
+ d, e, j, algo, -ret);
goto out;
}
- q = input;
+ q = output;
if (memcmp(q, template[i].result, template[i].rlen)) {
- printk(KERN_ERR "alg: aead: Test %d failed on "
- "%s for %s\n", j, e, algo);
+ pr_err("alg: aead%s: Test %d failed on %s for %s\n",
+ d, j, e, algo);
hexdump(q, template[i].rlen);
ret = -EINVAL;
goto out;
@@ -383,6 +571,10 @@ static int test_aead(struct crypto_aead *tfm, int enc,
}
for (i = 0, j = 0; i < tcount; i++) {
+		/* alignment tests are only done with contiguous buffers */
+ if (align_offset != 0)
+ break;
+
if (template[i].np) {
j++;
@@ -399,9 +591,8 @@ static int test_aead(struct crypto_aead *tfm, int enc,
ret = crypto_aead_setkey(tfm, key, template[i].klen);
if (!ret == template[i].fail) {
- printk(KERN_ERR "alg: aead: setkey failed on "
- "chunk test %d for %s: flags=%x\n", j,
- algo, crypto_aead_get_flags(tfm));
+ pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
+ d, j, algo, crypto_aead_get_flags(tfm));
goto out;
} else if (ret)
continue;
@@ -410,6 +601,8 @@ static int test_aead(struct crypto_aead *tfm, int enc,
ret = -EINVAL;
sg_init_table(sg, template[i].np);
+ if (diff_dst)
+ sg_init_table(sgout, template[i].np);
for (k = 0, temp = 0; k < template[i].np; k++) {
if (WARN_ON(offset_in_page(IDX[k]) +
template[i].tap[k] > PAGE_SIZE))
@@ -421,21 +614,31 @@ static int test_aead(struct crypto_aead *tfm, int enc,
memcpy(q, template[i].input + temp,
template[i].tap[k]);
+ sg_set_buf(&sg[k], q, template[i].tap[k]);
+
+ if (diff_dst) {
+ q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
+ offset_in_page(IDX[k]);
+
+ memset(q, 0, template[i].tap[k]);
+
+ sg_set_buf(&sgout[k], q,
+ template[i].tap[k]);
+ }
+
n = template[i].tap[k];
if (k == template[i].np - 1 && enc)
n += authsize;
if (offset_in_page(q) + n < PAGE_SIZE)
q[n] = 0;
- sg_set_buf(&sg[k], q, template[i].tap[k]);
temp += template[i].tap[k];
}
ret = crypto_aead_setauthsize(tfm, authsize);
if (ret) {
- printk(KERN_ERR "alg: aead: Failed to set "
- "authsize to %u on chunk test %d for "
- "%s\n", authsize, j, algo);
+ pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
+ d, authsize, j, algo);
goto out;
}
@@ -447,11 +650,18 @@ static int test_aead(struct crypto_aead *tfm, int enc,
goto out;
}
- sg[k - 1].length += authsize;
+ if (diff_dst)
+ sgout[k - 1].length += authsize;
+ else
+ sg[k - 1].length += authsize;
}
sg_init_table(asg, template[i].anp);
+ ret = -EINVAL;
for (k = 0, temp = 0; k < template[i].anp; k++) {
+ if (WARN_ON(offset_in_page(IDX[k]) +
+ template[i].atap[k] > PAGE_SIZE))
+ goto out;
sg_set_buf(&asg[k],
memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
offset_in_page(IDX[k]),
@@ -461,7 +671,7 @@ static int test_aead(struct crypto_aead *tfm, int enc,
temp += template[i].atap[k];
}
- aead_request_set_crypt(req, sg, sg,
+ aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
template[i].ilen,
iv);
@@ -473,43 +683,58 @@ static int test_aead(struct crypto_aead *tfm, int enc,
switch (ret) {
case 0:
+ if (template[i].novrfy) {
+ /* verification was supposed to fail */
+ pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
+ d, e, j, algo);
+ /* so really, we got a bad message */
+ ret = -EBADMSG;
+ goto out;
+ }
break;
case -EINPROGRESS:
case -EBUSY:
ret = wait_for_completion_interruptible(
&result.completion);
if (!ret && !(ret = result.err)) {
- INIT_COMPLETION(result.completion);
+ reinit_completion(&result.completion);
break;
}
+ case -EBADMSG:
+ if (template[i].novrfy)
+ /* verification failure was expected */
+ continue;
/* fall through */
default:
- printk(KERN_ERR "alg: aead: %s failed on "
- "chunk test %d for %s: ret=%d\n", e, j,
- algo, -ret);
+ pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
+ d, e, j, algo, -ret);
goto out;
}
ret = -EINVAL;
for (k = 0, temp = 0; k < template[i].np; k++) {
- q = xbuf[IDX[k] >> PAGE_SHIFT] +
- offset_in_page(IDX[k]);
+ if (diff_dst)
+ q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
+ offset_in_page(IDX[k]);
+ else
+ q = xbuf[IDX[k] >> PAGE_SHIFT] +
+ offset_in_page(IDX[k]);
n = template[i].tap[k];
if (k == template[i].np - 1)
n += enc ? authsize : -authsize;
if (memcmp(q, template[i].result + temp, n)) {
- printk(KERN_ERR "alg: aead: Chunk "
- "test %d failed on %s at page "
- "%u for %s\n", j, e, k, algo);
+ pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
+ d, j, e, k, algo);
hexdump(q, n);
goto out;
}
q += n;
if (k == template[i].np - 1 && !enc) {
- if (memcmp(q, template[i].input +
+ if (!diff_dst &&
+ memcmp(q, template[i].input +
temp + n, authsize))
n = authsize;
else
@@ -520,11 +745,8 @@ static int test_aead(struct crypto_aead *tfm, int enc,
;
}
if (n) {
- printk(KERN_ERR "alg: aead: Result "
- "buffer corruption in chunk "
- "test %d on %s at page %u for "
- "%s: %u bytes:\n", j, e, k,
- algo, n);
+ pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
+ d, j, e, k, algo, n);
hexdump(q, n);
goto out;
}
@@ -538,18 +760,65 @@ static int test_aead(struct crypto_aead *tfm, int enc,
out:
aead_request_free(req);
+ kfree(sg);
+out_nosg:
+ if (diff_dst)
+ testmgr_free_buf(xoutbuf);
+out_nooutbuf:
+ testmgr_free_buf(axbuf);
+out_noaxbuf:
+ testmgr_free_buf(xbuf);
+out_noxbuf:
+ kfree(iv);
return ret;
}
+static int test_aead(struct crypto_aead *tfm, int enc,
+ struct aead_testvec *template, unsigned int tcount)
+{
+ unsigned int alignmask;
+ int ret;
+
+ /* test 'dst == src' case */
+ ret = __test_aead(tfm, enc, template, tcount, false, 0);
+ if (ret)
+ return ret;
+
+ /* test 'dst != src' case */
+ ret = __test_aead(tfm, enc, template, tcount, true, 0);
+ if (ret)
+ return ret;
+
+ /* test unaligned buffers, check with one byte offset */
+ ret = __test_aead(tfm, enc, template, tcount, true, 1);
+ if (ret)
+ return ret;
+
+ alignmask = crypto_tfm_alg_alignmask(&tfm->base);
+ if (alignmask) {
+ /* Check if alignment mask for tfm is correctly set. */
+ ret = __test_aead(tfm, enc, template, tcount, true,
+ alignmask + 1);
+ if (ret)
+ return ret;
+ }
+
+ return 0;
+}
+
static int test_cipher(struct crypto_cipher *tfm, int enc,
struct cipher_testvec *template, unsigned int tcount)
{
const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
unsigned int i, j, k;
- int ret;
char *q;
const char *e;
void *data;
+ char *xbuf[XBUFSIZE];
+ int ret = -ENOMEM;
+
+ if (testmgr_alloc_buf(xbuf))
+ goto out_nobuf;
if (enc == ENCRYPT)
e = "encryption";
@@ -563,6 +832,10 @@ static int test_cipher(struct crypto_cipher *tfm, int enc,
j++;
+ ret = -EINVAL;
+ if (WARN_ON(template[i].ilen > PAGE_SIZE))
+ goto out;
+
data = xbuf[0];
memcpy(data, template[i].input, template[i].ilen);
@@ -603,23 +876,40 @@ static int test_cipher(struct crypto_cipher *tfm, int enc,
ret = 0;
out:
+ testmgr_free_buf(xbuf);
+out_nobuf:
return ret;
}
-static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
- struct cipher_testvec *template, unsigned int tcount)
+static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
+ struct cipher_testvec *template, unsigned int tcount,
+ const bool diff_dst, const int align_offset)
{
const char *algo =
crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
unsigned int i, j, k, n, temp;
- int ret;
char *q;
struct ablkcipher_request *req;
struct scatterlist sg[8];
- const char *e;
+ struct scatterlist sgout[8];
+ const char *e, *d;
struct tcrypt_result result;
void *data;
char iv[MAX_IVLEN];
+ char *xbuf[XBUFSIZE];
+ char *xoutbuf[XBUFSIZE];
+ int ret = -ENOMEM;
+
+ if (testmgr_alloc_buf(xbuf))
+ goto out_nobuf;
+
+ if (diff_dst && testmgr_alloc_buf(xoutbuf))
+ goto out_nooutbuf;
+
+ if (diff_dst)
+ d = "-ddst";
+ else
+ d = "";
if (enc == ENCRYPT)
e = "encryption";
@@ -630,9 +920,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
if (!req) {
- printk(KERN_ERR "alg: skcipher: Failed to allocate request "
- "for %s\n", algo);
- ret = -ENOMEM;
+ pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
+ d, algo);
goto out;
}
@@ -646,10 +935,16 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
else
memset(iv, 0, MAX_IVLEN);
- if (!(template[i].np)) {
+ if (!(template[i].np) || (template[i].also_non_np)) {
j++;
+ ret = -EINVAL;
+ if (WARN_ON(align_offset + template[i].ilen >
+ PAGE_SIZE))
+ goto out;
+
data = xbuf[0];
+ data += align_offset;
memcpy(data, template[i].input, template[i].ilen);
crypto_ablkcipher_clear_flags(tfm, ~0);
@@ -660,16 +955,22 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
ret = crypto_ablkcipher_setkey(tfm, template[i].key,
template[i].klen);
if (!ret == template[i].fail) {
- printk(KERN_ERR "alg: skcipher: setkey failed "
- "on test %d for %s: flags=%x\n", j,
- algo, crypto_ablkcipher_get_flags(tfm));
+ pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
+ d, j, algo,
+ crypto_ablkcipher_get_flags(tfm));
goto out;
} else if (ret)
continue;
sg_init_one(&sg[0], data, template[i].ilen);
+ if (diff_dst) {
+ data = xoutbuf[0];
+ data += align_offset;
+ sg_init_one(&sgout[0], data, template[i].ilen);
+ }
- ablkcipher_request_set_crypt(req, sg, sg,
+ ablkcipher_request_set_crypt(req, sg,
+ (diff_dst) ? sgout : sg,
template[i].ilen, iv);
ret = enc ?
crypto_ablkcipher_encrypt(req) :
@@ -683,21 +984,20 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
ret = wait_for_completion_interruptible(
&result.completion);
if (!ret && !((ret = result.err))) {
- INIT_COMPLETION(result.completion);
+ reinit_completion(&result.completion);
break;
}
/* fall through */
default:
- printk(KERN_ERR "alg: skcipher: %s failed on "
- "test %d for %s: ret=%d\n", e, j, algo,
- -ret);
+ pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
+ d, e, j, algo, -ret);
goto out;
}
q = data;
if (memcmp(q, template[i].result, template[i].rlen)) {
- printk(KERN_ERR "alg: skcipher: Test %d "
- "failed on %s for %s\n", j, e, algo);
+ pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
+ d, j, e, algo);
hexdump(q, template[i].rlen);
ret = -EINVAL;
goto out;
@@ -707,6 +1007,9 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
j = 0;
for (i = 0; i < tcount; i++) {
+ /* alignment tests are only done with continuous buffers */
+ if (align_offset != 0)
+ break;
if (template[i].iv)
memcpy(iv, template[i].iv, MAX_IVLEN);
@@ -724,9 +1027,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
ret = crypto_ablkcipher_setkey(tfm, template[i].key,
template[i].klen);
if (!ret == template[i].fail) {
- printk(KERN_ERR "alg: skcipher: setkey failed "
- "on chunk test %d for %s: flags=%x\n",
- j, algo,
+ pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
+ d, j, algo,
crypto_ablkcipher_get_flags(tfm));
goto out;
} else if (ret)
@@ -735,6 +1037,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
temp = 0;
ret = -EINVAL;
sg_init_table(sg, template[i].np);
+ if (diff_dst)
+ sg_init_table(sgout, template[i].np);
for (k = 0; k < template[i].np; k++) {
if (WARN_ON(offset_in_page(IDX[k]) +
template[i].tap[k] > PAGE_SIZE))
@@ -751,11 +1055,24 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
q[template[i].tap[k]] = 0;
sg_set_buf(&sg[k], q, template[i].tap[k]);
+ if (diff_dst) {
+ q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
+ offset_in_page(IDX[k]);
+
+ sg_set_buf(&sgout[k], q,
+ template[i].tap[k]);
+
+ memset(q, 0, template[i].tap[k]);
+ if (offset_in_page(q) +
+ template[i].tap[k] < PAGE_SIZE)
+ q[template[i].tap[k]] = 0;
+ }
temp += template[i].tap[k];
}
- ablkcipher_request_set_crypt(req, sg, sg,
+ ablkcipher_request_set_crypt(req, sg,
+ (diff_dst) ? sgout : sg,
template[i].ilen, iv);
ret = enc ?
@@ -770,28 +1087,30 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
ret = wait_for_completion_interruptible(
&result.completion);
if (!ret && !((ret = result.err))) {
- INIT_COMPLETION(result.completion);
+ reinit_completion(&result.completion);
break;
}
/* fall through */
default:
- printk(KERN_ERR "alg: skcipher: %s failed on "
- "chunk test %d for %s: ret=%d\n", e, j,
- algo, -ret);
+ pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
+ d, e, j, algo, -ret);
goto out;
}
temp = 0;
ret = -EINVAL;
for (k = 0; k < template[i].np; k++) {
- q = xbuf[IDX[k] >> PAGE_SHIFT] +
- offset_in_page(IDX[k]);
+ if (diff_dst)
+ q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
+ offset_in_page(IDX[k]);
+ else
+ q = xbuf[IDX[k] >> PAGE_SHIFT] +
+ offset_in_page(IDX[k]);
if (memcmp(q, template[i].result + temp,
template[i].tap[k])) {
- printk(KERN_ERR "alg: skcipher: Chunk "
- "test %d failed on %s at page "
- "%u for %s\n", j, e, k, algo);
+ pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
+ d, j, e, k, algo);
hexdump(q, template[i].tap[k]);
goto out;
}
@@ -800,11 +1119,8 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
for (n = 0; offset_in_page(q + n) && q[n]; n++)
;
if (n) {
- printk(KERN_ERR "alg: skcipher: "
- "Result buffer corruption in "
- "chunk test %d on %s at page "
- "%u for %s: %u bytes:\n", j, e,
- k, algo, n);
+ pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
+ d, j, e, k, algo, n);
hexdump(q, n);
goto out;
}
@@ -817,9 +1133,47 @@ static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
out:
ablkcipher_request_free(req);
+ if (diff_dst)
+ testmgr_free_buf(xoutbuf);
+out_nooutbuf:
+ testmgr_free_buf(xbuf);
+out_nobuf:
return ret;
}
+static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
+ struct cipher_testvec *template, unsigned int tcount)
+{
+ unsigned int alignmask;
+ int ret;
+
+ /* test 'dst == src' case */
+ ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
+ if (ret)
+ return ret;
+
+ /* test 'dst != src' case */
+ ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
+ if (ret)
+ return ret;
+
+ /* test unaligned buffers, check with one byte offset */
+ ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
+ if (ret)
+ return ret;
+
+ alignmask = crypto_tfm_alg_alignmask(&tfm->base);
+ if (alignmask) {
+ /* Check if alignment mask for tfm is correctly set. */
+ ret = __test_skcipher(tfm, enc, template, tcount, true,
+ alignmask + 1);
+ if (ret)
+ return ret;
+ }
+
+ return 0;
+}
+
static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
@@ -829,7 +1183,8 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
int ret;
for (i = 0; i < ctcount; i++) {
- int ilen, dlen = COMP_BUF_SIZE;
+ int ilen;
+ unsigned int dlen = COMP_BUF_SIZE;
memset(result, 0, sizeof (result));
@@ -861,7 +1216,8 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
}
for (i = 0; i < dtcount; i++) {
- int ilen, dlen = COMP_BUF_SIZE;
+ int ilen;
+ unsigned int dlen = COMP_BUF_SIZE;
memset(result, 0, sizeof (result));
@@ -898,6 +1254,244 @@ out:
return ret;
}
+static int test_pcomp(struct crypto_pcomp *tfm,
+ struct pcomp_testvec *ctemplate,
+ struct pcomp_testvec *dtemplate, int ctcount,
+ int dtcount)
+{
+ const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
+ unsigned int i;
+ char result[COMP_BUF_SIZE];
+ int res;
+
+ for (i = 0; i < ctcount; i++) {
+ struct comp_request req;
+ unsigned int produced = 0;
+
+ res = crypto_compress_setup(tfm, ctemplate[i].params,
+ ctemplate[i].paramsize);
+ if (res) {
+ pr_err("alg: pcomp: compression setup failed on test "
+ "%d for %s: error=%d\n", i + 1, algo, res);
+ return res;
+ }
+
+ res = crypto_compress_init(tfm);
+ if (res) {
+ pr_err("alg: pcomp: compression init failed on test "
+ "%d for %s: error=%d\n", i + 1, algo, res);
+ return res;
+ }
+
+ memset(result, 0, sizeof(result));
+
+ req.next_in = ctemplate[i].input;
+ req.avail_in = ctemplate[i].inlen / 2;
+ req.next_out = result;
+ req.avail_out = ctemplate[i].outlen / 2;
+
+ res = crypto_compress_update(tfm, &req);
+ if (res < 0 && (res != -EAGAIN || req.avail_in)) {
+ pr_err("alg: pcomp: compression update failed on test "
+ "%d for %s: error=%d\n", i + 1, algo, res);
+ return res;
+ }
+ if (res > 0)
+ produced += res;
+
+ /* Add remaining input data */
+ req.avail_in += (ctemplate[i].inlen + 1) / 2;
+
+ res = crypto_compress_update(tfm, &req);
+ if (res < 0 && (res != -EAGAIN || req.avail_in)) {
+ pr_err("alg: pcomp: compression update failed on test "
+ "%d for %s: error=%d\n", i + 1, algo, res);
+ return res;
+ }
+ if (res > 0)
+ produced += res;
+
+ /* Provide remaining output space */
+ req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
+
+ res = crypto_compress_final(tfm, &req);
+ if (res < 0) {
+ pr_err("alg: pcomp: compression final failed on test "
+ "%d for %s: error=%d\n", i + 1, algo, res);
+ return res;
+ }
+ produced += res;
+
+ if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
+ pr_err("alg: comp: Compression test %d failed for %s: "
+ "output len = %d (expected %d)\n", i + 1, algo,
+ COMP_BUF_SIZE - req.avail_out,
+ ctemplate[i].outlen);
+ return -EINVAL;
+ }
+
+ if (produced != ctemplate[i].outlen) {
+ pr_err("alg: comp: Compression test %d failed for %s: "
+ "returned len = %u (expected %d)\n", i + 1,
+ algo, produced, ctemplate[i].outlen);
+ return -EINVAL;
+ }
+
+ if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
+ pr_err("alg: pcomp: Compression test %d failed for "
+ "%s\n", i + 1, algo);
+ hexdump(result, ctemplate[i].outlen);
+ return -EINVAL;
+ }
+ }
+
+ for (i = 0; i < dtcount; i++) {
+ struct comp_request req;
+ unsigned int produced = 0;
+
+ res = crypto_decompress_setup(tfm, dtemplate[i].params,
+ dtemplate[i].paramsize);
+ if (res) {
+ pr_err("alg: pcomp: decompression setup failed on "
+ "test %d for %s: error=%d\n", i + 1, algo, res);
+ return res;
+ }
+
+ res = crypto_decompress_init(tfm);
+ if (res) {
+ pr_err("alg: pcomp: decompression init failed on test "
+ "%d for %s: error=%d\n", i + 1, algo, res);
+ return res;
+ }
+
+ memset(result, 0, sizeof(result));
+
+ req.next_in = dtemplate[i].input;
+ req.avail_in = dtemplate[i].inlen / 2;
+ req.next_out = result;
+ req.avail_out = dtemplate[i].outlen / 2;
+
+ res = crypto_decompress_update(tfm, &req);
+ if (res < 0 && (res != -EAGAIN || req.avail_in)) {
+ pr_err("alg: pcomp: decompression update failed on "
+ "test %d for %s: error=%d\n", i + 1, algo, res);
+ return res;
+ }
+ if (res > 0)
+ produced += res;
+
+ /* Add remaining input data */
+ req.avail_in += (dtemplate[i].inlen + 1) / 2;
+
+ res = crypto_decompress_update(tfm, &req);
+ if (res < 0 && (res != -EAGAIN || req.avail_in)) {
+ pr_err("alg: pcomp: decompression update failed on "
+ "test %d for %s: error=%d\n", i + 1, algo, res);
+ return res;
+ }
+ if (res > 0)
+ produced += res;
+
+ /* Provide remaining output space */
+ req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
+
+ res = crypto_decompress_final(tfm, &req);
+ if (res < 0 && (res != -EAGAIN || req.avail_in)) {
+ pr_err("alg: pcomp: decompression final failed on "
+ "test %d for %s: error=%d\n", i + 1, algo, res);
+ return res;
+ }
+ if (res > 0)
+ produced += res;
+
+ if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
+ pr_err("alg: comp: Decompression test %d failed for "
+ "%s: output len = %d (expected %d)\n", i + 1,
+ algo, COMP_BUF_SIZE - req.avail_out,
+ dtemplate[i].outlen);
+ return -EINVAL;
+ }
+
+ if (produced != dtemplate[i].outlen) {
+ pr_err("alg: comp: Decompression test %d failed for "
+ "%s: returned len = %u (expected %d)\n", i + 1,
+ algo, produced, dtemplate[i].outlen);
+ return -EINVAL;
+ }
+
+ if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
+ pr_err("alg: pcomp: Decompression test %d failed for "
+ "%s\n", i + 1, algo);
+ hexdump(result, dtemplate[i].outlen);
+ return -EINVAL;
+ }
+ }
+
+ return 0;
+}
+
+
+static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
+ unsigned int tcount)
+{
+ const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
+ int err = 0, i, j, seedsize;
+ u8 *seed;
+ char result[32];
+
+ seedsize = crypto_rng_seedsize(tfm);
+
+ seed = kmalloc(seedsize, GFP_KERNEL);
+ if (!seed) {
+ printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
+ "for %s\n", algo);
+ return -ENOMEM;
+ }
+
+ for (i = 0; i < tcount; i++) {
+ memset(result, 0, 32);
+
+ memcpy(seed, template[i].v, template[i].vlen);
+ memcpy(seed + template[i].vlen, template[i].key,
+ template[i].klen);
+ memcpy(seed + template[i].vlen + template[i].klen,
+ template[i].dt, template[i].dtlen);
+
+ err = crypto_rng_reset(tfm, seed, seedsize);
+ if (err) {
+ printk(KERN_ERR "alg: cprng: Failed to reset rng "
+ "for %s\n", algo);
+ goto out;
+ }
+
+ for (j = 0; j < template[i].loops; j++) {
+ err = crypto_rng_get_bytes(tfm, result,
+ template[i].rlen);
+ if (err != template[i].rlen) {
+ printk(KERN_ERR "alg: cprng: Failed to obtain "
+ "the correct amount of random data for "
+ "%s (requested %d, got %d)\n", algo,
+ template[i].rlen, err);
+ goto out;
+ }
+ }
+
+ err = memcmp(result, template[i].result,
+ template[i].rlen);
+ if (err) {
+ printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
+ i, algo);
+ hexdump(result, template[i].rlen);
+ err = -EINVAL;
+ goto out;
+ }
+ }
+
+out:
+ kfree(seed);
+ return err;
+}
+
static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
u32 type, u32 mask)
{
@@ -1007,6 +1601,28 @@ static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
return err;
}
+static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
+ u32 type, u32 mask)
+{
+ struct crypto_pcomp *tfm;
+ int err;
+
+ tfm = crypto_alloc_pcomp(driver, type, mask);
+ if (IS_ERR(tfm)) {
+ pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
+ driver, PTR_ERR(tfm));
+ return PTR_ERR(tfm);
+ }
+
+ err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
+ desc->suite.pcomp.decomp.vecs,
+ desc->suite.pcomp.comp.count,
+ desc->suite.pcomp.decomp.count);
+
+ crypto_free_pcomp(tfm);
+ return err;
+}
+
static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
u32 type, u32 mask)
{
@@ -1020,7 +1636,11 @@ static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
return PTR_ERR(tfm);
}
- err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);
+ err = test_hash(tfm, desc->suite.hash.vecs,
+ desc->suite.hash.count, true);
+ if (!err)
+ err = test_hash(tfm, desc->suite.hash.vecs,
+ desc->suite.hash.count, false);
crypto_free_ahash(tfm);
return err;
@@ -1075,11 +1695,344 @@ out:
return err;
}
+static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
+ u32 type, u32 mask)
+{
+ struct crypto_rng *rng;
+ int err;
+
+ rng = crypto_alloc_rng(driver, type, mask);
+ if (IS_ERR(rng)) {
+ printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
+ "%ld\n", driver, PTR_ERR(rng));
+ return PTR_ERR(rng);
+ }
+
+ err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
+
+ crypto_free_rng(rng);
+
+ return err;
+}
+
+static int alg_test_null(const struct alg_test_desc *desc,
+ const char *driver, u32 type, u32 mask)
+{
+ return 0;
+}
+
/* Please keep this list sorted by algorithm name. */
static const struct alg_test_desc alg_test_descs[] = {
{
+ .alg = "__cbc-cast5-avx",
+ .test = alg_test_null,
+ }, {
+ .alg = "__cbc-cast6-avx",
+ .test = alg_test_null,
+ }, {
+ .alg = "__cbc-serpent-avx",
+ .test = alg_test_null,
+ }, {
+ .alg = "__cbc-serpent-avx2",
+ .test = alg_test_null,
+ }, {
+ .alg = "__cbc-serpent-sse2",
+ .test = alg_test_null,
+ }, {
+ .alg = "__cbc-twofish-avx",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-cbc-aes-aesni",
+ .test = alg_test_null,
+ .fips_allowed = 1,
+ }, {
+ .alg = "__driver-cbc-camellia-aesni",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-cbc-camellia-aesni-avx2",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-cbc-cast5-avx",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-cbc-cast6-avx",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-cbc-serpent-avx",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-cbc-serpent-avx2",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-cbc-serpent-sse2",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-cbc-twofish-avx",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-ecb-aes-aesni",
+ .test = alg_test_null,
+ .fips_allowed = 1,
+ }, {
+ .alg = "__driver-ecb-camellia-aesni",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-ecb-camellia-aesni-avx2",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-ecb-cast5-avx",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-ecb-cast6-avx",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-ecb-serpent-avx",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-ecb-serpent-avx2",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-ecb-serpent-sse2",
+ .test = alg_test_null,
+ }, {
+ .alg = "__driver-ecb-twofish-avx",
+ .test = alg_test_null,
+ }, {
+ .alg = "__ghash-pclmulqdqni",
+ .test = alg_test_null,
+ .fips_allowed = 1,
+ }, {
+ .alg = "ansi_cprng",
+ .test = alg_test_cprng,
+ .fips_allowed = 1,
+ .suite = {
+ .cprng = {
+ .vecs = ansi_cprng_aes_tv_template,
+ .count = ANSI_CPRNG_AES_TEST_VECTORS
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(md5),ecb(cipher_null))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
+ .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
+ .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha1),cbc(aes))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha1_aes_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA1_AES_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha1),cbc(des))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha1_des_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA1_DES_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha1),cbc(des3_ede))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha1_des3_ede_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha1),ecb(cipher_null))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha1_ecb_cipher_null_enc_tv_temp,
+ .count =
+ HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
+ },
+ .dec = {
+ .vecs =
+ hmac_sha1_ecb_cipher_null_dec_tv_temp,
+ .count =
+ HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha224),cbc(des))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha224_des_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA224_DES_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha224),cbc(des3_ede))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha224_des3_ede_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha256),cbc(aes))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha256_aes_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA256_AES_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha256),cbc(des))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha256_des_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA256_DES_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha256),cbc(des3_ede))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha256_des3_ede_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha384),cbc(des))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha384_des_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA384_DES_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha384),cbc(des3_ede))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha384_des3_ede_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha512),cbc(aes))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha512_aes_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA512_AES_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha512),cbc(des))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha512_des_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA512_DES_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
+ .alg = "authenc(hmac(sha512),cbc(des3_ede))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs =
+ hmac_sha512_des3_ede_cbc_enc_tv_temp,
+ .count =
+ HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
+ }
+ }
+ }
+ }, {
.alg = "cbc(aes)",
.test = alg_test_skcipher,
+ .fips_allowed = 1,
.suite = {
.cipher = {
.enc = {
@@ -1138,6 +2091,36 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "cbc(cast5)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = cast5_cbc_enc_tv_template,
+ .count = CAST5_CBC_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = cast5_cbc_dec_tv_template,
+ .count = CAST5_CBC_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "cbc(cast6)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = cast6_cbc_enc_tv_template,
+ .count = CAST6_CBC_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = cast6_cbc_dec_tv_template,
+ .count = CAST6_CBC_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
.alg = "cbc(des)",
.test = alg_test_skcipher,
.suite = {
@@ -1155,6 +2138,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "cbc(des3_ede)",
.test = alg_test_skcipher,
+ .fips_allowed = 1,
.suite = {
.cipher = {
.enc = {
@@ -1168,6 +2152,21 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "cbc(serpent)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = serpent_cbc_enc_tv_template,
+ .count = SERPENT_CBC_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = serpent_cbc_dec_tv_template,
+ .count = SERPENT_CBC_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
.alg = "cbc(twofish)",
.test = alg_test_skcipher,
.suite = {
@@ -1185,6 +2184,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "ccm(aes)",
.test = alg_test_aead,
+ .fips_allowed = 1,
.suite = {
.aead = {
.enc = {
@@ -1198,8 +2198,30 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "cmac(aes)",
+ .test = alg_test_hash,
+ .suite = {
+ .hash = {
+ .vecs = aes_cmac128_tv_template,
+ .count = CMAC_AES_TEST_VECTORS
+ }
+ }
+ }, {
+ .alg = "cmac(des3_ede)",
+ .test = alg_test_hash,
+ .suite = {
+ .hash = {
+ .vecs = des3_ede_cmac64_tv_template,
+ .count = CMAC_DES3_EDE_TEST_VECTORS
+ }
+ }
+ }, {
+ .alg = "compress_null",
+ .test = alg_test_null,
+ }, {
.alg = "crc32c",
.test = alg_test_crc32c,
+ .fips_allowed = 1,
.suite = {
.hash = {
.vecs = crc32c_tv_template,
@@ -1207,6 +2229,201 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "crct10dif",
+ .test = alg_test_hash,
+ .fips_allowed = 1,
+ .suite = {
+ .hash = {
+ .vecs = crct10dif_tv_template,
+ .count = CRCT10DIF_TEST_VECTORS
+ }
+ }
+ }, {
+ .alg = "cryptd(__driver-cbc-aes-aesni)",
+ .test = alg_test_null,
+ .fips_allowed = 1,
+ }, {
+ .alg = "cryptd(__driver-cbc-camellia-aesni)",
+ .test = alg_test_null,
+ }, {
+ .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
+ .test = alg_test_null,
+ }, {
+ .alg = "cryptd(__driver-cbc-serpent-avx2)",
+ .test = alg_test_null,
+ }, {
+ .alg = "cryptd(__driver-ecb-aes-aesni)",
+ .test = alg_test_null,
+ .fips_allowed = 1,
+ }, {
+ .alg = "cryptd(__driver-ecb-camellia-aesni)",
+ .test = alg_test_null,
+ }, {
+ .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
+ .test = alg_test_null,
+ }, {
+ .alg = "cryptd(__driver-ecb-cast5-avx)",
+ .test = alg_test_null,
+ }, {
+ .alg = "cryptd(__driver-ecb-cast6-avx)",
+ .test = alg_test_null,
+ }, {
+ .alg = "cryptd(__driver-ecb-serpent-avx)",
+ .test = alg_test_null,
+ }, {
+ .alg = "cryptd(__driver-ecb-serpent-avx2)",
+ .test = alg_test_null,
+ }, {
+ .alg = "cryptd(__driver-ecb-serpent-sse2)",
+ .test = alg_test_null,
+ }, {
+ .alg = "cryptd(__driver-ecb-twofish-avx)",
+ .test = alg_test_null,
+ }, {
+ .alg = "cryptd(__driver-gcm-aes-aesni)",
+ .test = alg_test_null,
+ .fips_allowed = 1,
+ }, {
+ .alg = "cryptd(__ghash-pclmulqdqni)",
+ .test = alg_test_null,
+ .fips_allowed = 1,
+ }, {
+ .alg = "ctr(aes)",
+ .test = alg_test_skcipher,
+ .fips_allowed = 1,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = aes_ctr_enc_tv_template,
+ .count = AES_CTR_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = aes_ctr_dec_tv_template,
+ .count = AES_CTR_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "ctr(blowfish)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = bf_ctr_enc_tv_template,
+ .count = BF_CTR_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = bf_ctr_dec_tv_template,
+ .count = BF_CTR_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "ctr(camellia)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = camellia_ctr_enc_tv_template,
+ .count = CAMELLIA_CTR_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = camellia_ctr_dec_tv_template,
+ .count = CAMELLIA_CTR_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "ctr(cast5)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = cast5_ctr_enc_tv_template,
+ .count = CAST5_CTR_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = cast5_ctr_dec_tv_template,
+ .count = CAST5_CTR_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "ctr(cast6)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = cast6_ctr_enc_tv_template,
+ .count = CAST6_CTR_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = cast6_ctr_dec_tv_template,
+ .count = CAST6_CTR_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "ctr(des)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = des_ctr_enc_tv_template,
+ .count = DES_CTR_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = des_ctr_dec_tv_template,
+ .count = DES_CTR_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "ctr(des3_ede)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = des3_ede_ctr_enc_tv_template,
+ .count = DES3_EDE_CTR_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = des3_ede_ctr_dec_tv_template,
+ .count = DES3_EDE_CTR_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "ctr(serpent)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = serpent_ctr_enc_tv_template,
+ .count = SERPENT_CTR_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = serpent_ctr_dec_tv_template,
+ .count = SERPENT_CTR_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "ctr(twofish)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = tf_ctr_enc_tv_template,
+ .count = TF_CTR_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = tf_ctr_dec_tv_template,
+ .count = TF_CTR_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
.alg = "cts(cbc(aes))",
.test = alg_test_skcipher,
.suite = {
@@ -1224,6 +2441,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "deflate",
.test = alg_test_comp,
+ .fips_allowed = 1,
.suite = {
.comp = {
.comp = {
@@ -1237,8 +2455,16 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "digest_null",
+ .test = alg_test_null,
+ }, {
+ .alg = "ecb(__aes-aesni)",
+ .test = alg_test_null,
+ .fips_allowed = 1,
+ }, {
.alg = "ecb(aes)",
.test = alg_test_skcipher,
+ .fips_allowed = 1,
.suite = {
.cipher = {
.enc = {
@@ -1342,8 +2568,12 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "ecb(cipher_null)",
+ .test = alg_test_null,
+ }, {
.alg = "ecb(des)",
.test = alg_test_skcipher,
+ .fips_allowed = 1,
.suite = {
.cipher = {
.enc = {
@@ -1359,6 +2589,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "ecb(des3_ede)",
.test = alg_test_skcipher,
+ .fips_allowed = 1,
.suite = {
.cipher = {
.enc = {
@@ -1372,6 +2603,21 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "ecb(fcrypt)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = fcrypt_pcbc_enc_tv_template,
+ .count = 1
+ },
+ .dec = {
+ .vecs = fcrypt_pcbc_dec_tv_template,
+ .count = 1
+ }
+ }
+ }
+ }, {
.alg = "ecb(khazad)",
.test = alg_test_skcipher,
.suite = {
@@ -1494,6 +2740,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "gcm(aes)",
.test = alg_test_aead,
+ .fips_allowed = 1,
.suite = {
.aead = {
.enc = {
@@ -1507,6 +2754,25 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "ghash",
+ .test = alg_test_hash,
+ .fips_allowed = 1,
+ .suite = {
+ .hash = {
+ .vecs = ghash_tv_template,
+ .count = GHASH_TEST_VECTORS
+ }
+ }
+ }, {
+ .alg = "hmac(crc32)",
+ .test = alg_test_hash,
+ .suite = {
+ .hash = {
+ .vecs = bfin_crc_tv_template,
+ .count = BFIN_CRC_TEST_VECTORS
+ }
+ }
+ }, {
.alg = "hmac(md5)",
.test = alg_test_hash,
.suite = {
@@ -1536,6 +2802,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "hmac(sha1)",
.test = alg_test_hash,
+ .fips_allowed = 1,
.suite = {
.hash = {
.vecs = hmac_sha1_tv_template,
@@ -1545,6 +2812,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "hmac(sha224)",
.test = alg_test_hash,
+ .fips_allowed = 1,
.suite = {
.hash = {
.vecs = hmac_sha224_tv_template,
@@ -1554,6 +2822,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "hmac(sha256)",
.test = alg_test_hash,
+ .fips_allowed = 1,
.suite = {
.hash = {
.vecs = hmac_sha256_tv_template,
@@ -1563,6 +2832,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "hmac(sha384)",
.test = alg_test_hash,
+ .fips_allowed = 1,
.suite = {
.hash = {
.vecs = hmac_sha384_tv_template,
@@ -1572,6 +2842,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "hmac(sha512)",
.test = alg_test_hash,
+ .fips_allowed = 1,
.suite = {
.hash = {
.vecs = hmac_sha512_tv_template,
@@ -1594,8 +2865,69 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "lrw(camellia)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = camellia_lrw_enc_tv_template,
+ .count = CAMELLIA_LRW_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = camellia_lrw_dec_tv_template,
+ .count = CAMELLIA_LRW_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "lrw(cast6)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = cast6_lrw_enc_tv_template,
+ .count = CAST6_LRW_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = cast6_lrw_dec_tv_template,
+ .count = CAST6_LRW_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "lrw(serpent)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = serpent_lrw_enc_tv_template,
+ .count = SERPENT_LRW_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = serpent_lrw_dec_tv_template,
+ .count = SERPENT_LRW_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "lrw(twofish)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = tf_lrw_enc_tv_template,
+ .count = TF_LRW_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = tf_lrw_dec_tv_template,
+ .count = TF_LRW_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
.alg = "lzo",
.test = alg_test_comp,
+ .fips_allowed = 1,
.suite = {
.comp = {
.comp = {
@@ -1636,6 +2968,22 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "ofb(aes)",
+ .test = alg_test_skcipher,
+ .fips_allowed = 1,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = aes_ofb_enc_tv_template,
+ .count = AES_OFB_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = aes_ofb_dec_tv_template,
+ .count = AES_OFB_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
.alg = "pcbc(fcrypt)",
.test = alg_test_skcipher,
.suite = {
@@ -1653,19 +3001,66 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "rfc3686(ctr(aes))",
.test = alg_test_skcipher,
+ .fips_allowed = 1,
.suite = {
.cipher = {
.enc = {
- .vecs = aes_ctr_enc_tv_template,
- .count = AES_CTR_ENC_TEST_VECTORS
+ .vecs = aes_ctr_rfc3686_enc_tv_template,
+ .count = AES_CTR_3686_ENC_TEST_VECTORS
},
.dec = {
- .vecs = aes_ctr_dec_tv_template,
- .count = AES_CTR_DEC_TEST_VECTORS
+ .vecs = aes_ctr_rfc3686_dec_tv_template,
+ .count = AES_CTR_3686_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "rfc4106(gcm(aes))",
+ .test = alg_test_aead,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs = aes_gcm_rfc4106_enc_tv_template,
+ .count = AES_GCM_4106_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = aes_gcm_rfc4106_dec_tv_template,
+ .count = AES_GCM_4106_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "rfc4309(ccm(aes))",
+ .test = alg_test_aead,
+ .fips_allowed = 1,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs = aes_ccm_rfc4309_enc_tv_template,
+ .count = AES_CCM_4309_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = aes_ccm_rfc4309_dec_tv_template,
+ .count = AES_CCM_4309_DEC_TEST_VECTORS
}
}
}
}, {
+ .alg = "rfc4543(gcm(aes))",
+ .test = alg_test_aead,
+ .suite = {
+ .aead = {
+ .enc = {
+ .vecs = aes_gcm_rfc4543_enc_tv_template,
+ .count = AES_GCM_4543_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = aes_gcm_rfc4543_dec_tv_template,
+ .count = AES_GCM_4543_DEC_TEST_VECTORS
+ },
+ }
+ }
+ }, {
.alg = "rmd128",
.test = alg_test_hash,
.suite = {
@@ -1715,6 +3110,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "sha1",
.test = alg_test_hash,
+ .fips_allowed = 1,
.suite = {
.hash = {
.vecs = sha1_tv_template,
@@ -1724,6 +3120,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "sha224",
.test = alg_test_hash,
+ .fips_allowed = 1,
.suite = {
.hash = {
.vecs = sha224_tv_template,
@@ -1733,6 +3130,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "sha256",
.test = alg_test_hash,
+ .fips_allowed = 1,
.suite = {
.hash = {
.vecs = sha256_tv_template,
@@ -1742,6 +3140,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "sha384",
.test = alg_test_hash,
+ .fips_allowed = 1,
.suite = {
.hash = {
.vecs = sha384_tv_template,
@@ -1751,6 +3150,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "sha512",
.test = alg_test_hash,
+ .fips_allowed = 1,
.suite = {
.hash = {
.vecs = sha512_tv_template,
@@ -1785,6 +3185,15 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "vmac(aes)",
+ .test = alg_test_hash,
+ .suite = {
+ .hash = {
+ .vecs = aes_vmac128_tv_template,
+ .count = VMAC_AES_TEST_VECTORS
+ }
+ }
+ }, {
.alg = "wp256",
.test = alg_test_hash,
.suite = {
@@ -1823,6 +3232,7 @@ static const struct alg_test_desc alg_test_descs[] = {
}, {
.alg = "xts(aes)",
.test = alg_test_skcipher,
+ .fips_allowed = 1,
.suite = {
.cipher = {
.enc = {
@@ -1835,9 +3245,114 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}
+ }, {
+ .alg = "xts(camellia)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = camellia_xts_enc_tv_template,
+ .count = CAMELLIA_XTS_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = camellia_xts_dec_tv_template,
+ .count = CAMELLIA_XTS_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "xts(cast6)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = cast6_xts_enc_tv_template,
+ .count = CAST6_XTS_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = cast6_xts_dec_tv_template,
+ .count = CAST6_XTS_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "xts(serpent)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = serpent_xts_enc_tv_template,
+ .count = SERPENT_XTS_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = serpent_xts_dec_tv_template,
+ .count = SERPENT_XTS_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "xts(twofish)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = tf_xts_enc_tv_template,
+ .count = TF_XTS_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = tf_xts_dec_tv_template,
+ .count = TF_XTS_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "zlib",
+ .test = alg_test_pcomp,
+ .fips_allowed = 1,
+ .suite = {
+ .pcomp = {
+ .comp = {
+ .vecs = zlib_comp_tv_template,
+ .count = ZLIB_COMP_TEST_VECTORS
+ },
+ .decomp = {
+ .vecs = zlib_decomp_tv_template,
+ .count = ZLIB_DECOMP_TEST_VECTORS
+ }
+ }
+ }
}
};
+static bool alg_test_descs_checked;
+
+static void alg_test_descs_check_order(void)
+{
+ int i;
+
+ /* only check once */
+ if (alg_test_descs_checked)
+ return;
+
+ alg_test_descs_checked = true;
+
+ for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
+ int diff = strcmp(alg_test_descs[i - 1].alg,
+ alg_test_descs[i].alg);
+
+ if (WARN_ON(diff > 0)) {
+ pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
+ alg_test_descs[i - 1].alg,
+ alg_test_descs[i].alg);
+ }
+
+ if (WARN_ON(diff == 0)) {
+ pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
+ alg_test_descs[i].alg);
+ }
+ }
+}
+
static int alg_find_test(const char *alg)
{
int start = 0;
@@ -1866,8 +3381,11 @@ static int alg_find_test(const char *alg)
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
int i;
+ int j;
int rc;
+ alg_test_descs_check_order();
+
if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
char nalg[CRYPTO_MAX_ALG_NAME];
@@ -1879,60 +3397,47 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
if (i < 0)
goto notest;
- return alg_test_cipher(alg_test_descs + i, driver, type, mask);
+ if (fips_enabled && !alg_test_descs[i].fips_allowed)
+ goto non_fips_alg;
+
+ rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
+ goto test_done;
}
i = alg_find_test(alg);
- if (i < 0)
+ j = alg_find_test(driver);
+ if (i < 0 && j < 0)
goto notest;
- rc = alg_test_descs[i].test(alg_test_descs + i, driver,
- type, mask);
+ if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
+ (j >= 0 && !alg_test_descs[j].fips_allowed)))
+ goto non_fips_alg;
+
+ rc = 0;
+ if (i >= 0)
+ rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
+ type, mask);
+ if (j >= 0 && j != i)
+ rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
+ type, mask);
+
+test_done:
if (fips_enabled && rc)
panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
+ if (fips_enabled && !rc)
+ pr_info(KERN_INFO "alg: self-tests for %s (%s) passed\n",
+ driver, alg);
+
return rc;
notest:
printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
return 0;
-}
-EXPORT_SYMBOL_GPL(alg_test);
-
-int __init testmgr_init(void)
-{
- int i;
-
- for (i = 0; i < XBUFSIZE; i++) {
- xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
- if (!xbuf[i])
- goto err_free_xbuf;
- }
-
- for (i = 0; i < XBUFSIZE; i++) {
- axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
- if (!axbuf[i])
- goto err_free_axbuf;
- }
-
- return 0;
-
-err_free_axbuf:
- for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
- free_page((unsigned long)axbuf[i]);
-err_free_xbuf:
- for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
- free_page((unsigned long)xbuf[i]);
-
- return -ENOMEM;
+non_fips_alg:
+ return -EINVAL;
}
-void testmgr_exit(void)
-{
- int i;
+#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
- for (i = 0; i < XBUFSIZE; i++)
- free_page((unsigned long)axbuf[i]);
- for (i = 0; i < XBUFSIZE; i++)
- free_page((unsigned long)xbuf[i]);
-}
+EXPORT_SYMBOL_GPL(alg_test);
diff --git a/crypto/testmgr.h b/crypto/testmgr.h
index 132953e144d..69d0dd8ef27 100644
--- a/crypto/testmgr.h
+++ b/crypto/testmgr.h
@@ -6,6 +6,15 @@
* Copyright (c) 2007 Nokia Siemens Networks
* Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
*
+ * Updated RFC4106 AES-GCM testing. Some test vectors were taken from
+ * http://csrc.nist.gov/groups/ST/toolkit/BCM/documents/proposedmodes/
+ * gcm/gcm-test-vectors.tar.gz
+ * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
+ * Adrian Hoban <adrian.hoban@intel.com>
+ * Gabriele Paoloni <gabriele.paoloni@intel.com>
+ * Tadeusz Struk (tadeusz.struk@intel.com)
+ * Copyright (c) 2010, Intel Corporation.
+ *
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
@@ -15,6 +24,11 @@
#ifndef _CRYPTO_TESTMGR_H
#define _CRYPTO_TESTMGR_H
+#include <linux/netlink.h>
+#include <linux/zlib.h>
+
+#include <crypto/compress.h>
+
#define MAX_DIGEST_SIZE 64
#define MAX_TAP 8
@@ -27,7 +41,7 @@ struct hash_testvec {
char *plaintext;
char *digest;
unsigned char tap[MAX_TAP];
- unsigned char psize;
+ unsigned short psize;
unsigned char np;
unsigned char ksize;
};
@@ -39,6 +53,7 @@ struct cipher_testvec {
char *result;
unsigned short tap[MAX_TAP];
int np;
+ unsigned char also_non_np;
unsigned char fail;
unsigned char wk; /* weak key flag */
unsigned char klen;
@@ -57,6 +72,7 @@ struct aead_testvec {
int np;
int anp;
unsigned char fail;
+ unsigned char novrfy; /* ccm dec verification failure expected */
unsigned char wk; /* weak key flag */
unsigned char klen;
unsigned short ilen;
@@ -64,6 +80,18 @@ struct aead_testvec {
unsigned short rlen;
};
+struct cprng_testvec {
+ char *key;
+ char *dt;
+ char *v;
+ char *result;
+ unsigned char klen;
+ unsigned short dtlen;
+ unsigned short vlen;
+ unsigned short rlen;
+ unsigned short loops;
+};
+
static char zeroed_string[48];
/*
@@ -422,13 +450,52 @@ static struct hash_testvec rmd320_tv_template[] = {
}
};
+#define CRCT10DIF_TEST_VECTORS 3
+static struct hash_testvec crct10dif_tv_template[] = {
+ {
+ .plaintext = "abc",
+ .psize = 3,
+#ifdef __LITTLE_ENDIAN
+ .digest = "\x3b\x44",
+#else
+ .digest = "\x44\x3b",
+#endif
+ }, {
+ .plaintext = "1234567890123456789012345678901234567890"
+ "123456789012345678901234567890123456789",
+ .psize = 79,
+#ifdef __LITTLE_ENDIAN
+ .digest = "\x70\x4b",
+#else
+ .digest = "\x4b\x70",
+#endif
+ }, {
+ .plaintext =
+ "abcddddddddddddddddddddddddddddddddddddddddddddddddddddd",
+ .psize = 56,
+#ifdef __LITTLE_ENDIAN
+ .digest = "\xe3\x9c",
+#else
+ .digest = "\x9c\xe3",
+#endif
+ .np = 2,
+ .tap = { 28, 28 }
+ }
+};
+
/*
* SHA1 test vectors from FIPS PUB 180-1
+ * Long vector from CAVS 5.0
*/
-#define SHA1_TEST_VECTORS 2
+#define SHA1_TEST_VECTORS 6
static struct hash_testvec sha1_tv_template[] = {
{
+ .plaintext = "",
+ .psize = 0,
+ .digest = "\xda\x39\xa3\xee\x5e\x6b\x4b\x0d\x32\x55"
+ "\xbf\xef\x95\x60\x18\x90\xaf\xd8\x07\x09",
+ }, {
.plaintext = "abc",
.psize = 3,
.digest = "\xa9\x99\x3e\x36\x47\x06\x81\x6a\xba\x3e"
@@ -440,6 +507,171 @@ static struct hash_testvec sha1_tv_template[] = {
"\x4a\xa1\xf9\x51\x29\xe5\xe5\x46\x70\xf1",
.np = 2,
.tap = { 28, 28 }
+ }, {
+ .plaintext = "\xec\x29\x56\x12\x44\xed\xe7\x06"
+ "\xb6\xeb\x30\xa1\xc3\x71\xd7\x44"
+ "\x50\xa1\x05\xc3\xf9\x73\x5f\x7f"
+ "\xa9\xfe\x38\xcf\x67\xf3\x04\xa5"
+ "\x73\x6a\x10\x6e\x92\xe1\x71\x39"
+ "\xa6\x81\x3b\x1c\x81\xa4\xf3\xd3"
+ "\xfb\x95\x46\xab\x42\x96\xfa\x9f"
+ "\x72\x28\x26\xc0\x66\x86\x9e\xda"
+ "\xcd\x73\xb2\x54\x80\x35\x18\x58"
+ "\x13\xe2\x26\x34\xa9\xda\x44\x00"
+ "\x0d\x95\xa2\x81\xff\x9f\x26\x4e"
+ "\xcc\xe0\xa9\x31\x22\x21\x62\xd0"
+ "\x21\xcc\xa2\x8d\xb5\xf3\xc2\xaa"
+ "\x24\x94\x5a\xb1\xe3\x1c\xb4\x13"
+ "\xae\x29\x81\x0f\xd7\x94\xca\xd5"
+ "\xdf\xaf\x29\xec\x43\xcb\x38\xd1"
+ "\x98\xfe\x4a\xe1\xda\x23\x59\x78"
+ "\x02\x21\x40\x5b\xd6\x71\x2a\x53"
+ "\x05\xda\x4b\x1b\x73\x7f\xce\x7c"
+ "\xd2\x1c\x0e\xb7\x72\x8d\x08\x23"
+ "\x5a\x90\x11",
+ .psize = 163,
+ .digest = "\x97\x01\x11\xc4\xe7\x7b\xcc\x88\xcc\x20"
+ "\x45\x9c\x02\xb6\x9b\x4a\xa8\xf5\x82\x17",
+ .np = 4,
+ .tap = { 63, 64, 31, 5 }
+ }, {
+ .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+-",
+ .psize = 64,
+ .digest = "\xc8\x71\xf6\x9a\x63\xcc\xa9\x84\x84\x82"
+ "\x64\xe7\x79\x95\x5d\xd7\x19\x41\x7c\x91",
+ }, {
+ .plaintext = "\x08\x9f\x13\xaa\x41\xd8\x4c\xe3"
+ "\x7a\x11\x85\x1c\xb3\x27\xbe\x55"
+ "\xec\x60\xf7\x8e\x02\x99\x30\xc7"
+ "\x3b\xd2\x69\x00\x74\x0b\xa2\x16"
+ "\xad\x44\xdb\x4f\xe6\x7d\x14\x88"
+ "\x1f\xb6\x2a\xc1\x58\xef\x63\xfa"
+ "\x91\x05\x9c\x33\xca\x3e\xd5\x6c"
+ "\x03\x77\x0e\xa5\x19\xb0\x47\xde"
+ "\x52\xe9\x80\x17\x8b\x22\xb9\x2d"
+ "\xc4\x5b\xf2\x66\xfd\x94\x08\x9f"
+ "\x36\xcd\x41\xd8\x6f\x06\x7a\x11"
+ "\xa8\x1c\xb3\x4a\xe1\x55\xec\x83"
+ "\x1a\x8e\x25\xbc\x30\xc7\x5e\xf5"
+ "\x69\x00\x97\x0b\xa2\x39\xd0\x44"
+ "\xdb\x72\x09\x7d\x14\xab\x1f\xb6"
+ "\x4d\xe4\x58\xef\x86\x1d\x91\x28"
+ "\xbf\x33\xca\x61\xf8\x6c\x03\x9a"
+ "\x0e\xa5\x3c\xd3\x47\xde\x75\x0c"
+ "\x80\x17\xae\x22\xb9\x50\xe7\x5b"
+ "\xf2\x89\x20\x94\x2b\xc2\x36\xcd"
+ "\x64\xfb\x6f\x06\x9d\x11\xa8\x3f"
+ "\xd6\x4a\xe1\x78\x0f\x83\x1a\xb1"
+ "\x25\xbc\x53\xea\x5e\xf5\x8c\x00"
+ "\x97\x2e\xc5\x39\xd0\x67\xfe\x72"
+ "\x09\xa0\x14\xab\x42\xd9\x4d\xe4"
+ "\x7b\x12\x86\x1d\xb4\x28\xbf\x56"
+ "\xed\x61\xf8\x8f\x03\x9a\x31\xc8"
+ "\x3c\xd3\x6a\x01\x75\x0c\xa3\x17"
+ "\xae\x45\xdc\x50\xe7\x7e\x15\x89"
+ "\x20\xb7\x2b\xc2\x59\xf0\x64\xfb"
+ "\x92\x06\x9d\x34\xcb\x3f\xd6\x6d"
+ "\x04\x78\x0f\xa6\x1a\xb1\x48\xdf"
+ "\x53\xea\x81\x18\x8c\x23\xba\x2e"
+ "\xc5\x5c\xf3\x67\xfe\x95\x09\xa0"
+ "\x37\xce\x42\xd9\x70\x07\x7b\x12"
+ "\xa9\x1d\xb4\x4b\xe2\x56\xed\x84"
+ "\x1b\x8f\x26\xbd\x31\xc8\x5f\xf6"
+ "\x6a\x01\x98\x0c\xa3\x3a\xd1\x45"
+ "\xdc\x73\x0a\x7e\x15\xac\x20\xb7"
+ "\x4e\xe5\x59\xf0\x87\x1e\x92\x29"
+ "\xc0\x34\xcb\x62\xf9\x6d\x04\x9b"
+ "\x0f\xa6\x3d\xd4\x48\xdf\x76\x0d"
+ "\x81\x18\xaf\x23\xba\x51\xe8\x5c"
+ "\xf3\x8a\x21\x95\x2c\xc3\x37\xce"
+ "\x65\xfc\x70\x07\x9e\x12\xa9\x40"
+ "\xd7\x4b\xe2\x79\x10\x84\x1b\xb2"
+ "\x26\xbd\x54\xeb\x5f\xf6\x8d\x01"
+ "\x98\x2f\xc6\x3a\xd1\x68\xff\x73"
+ "\x0a\xa1\x15\xac\x43\xda\x4e\xe5"
+ "\x7c\x13\x87\x1e\xb5\x29\xc0\x57"
+ "\xee\x62\xf9\x90\x04\x9b\x32\xc9"
+ "\x3d\xd4\x6b\x02\x76\x0d\xa4\x18"
+ "\xaf\x46\xdd\x51\xe8\x7f\x16\x8a"
+ "\x21\xb8\x2c\xc3\x5a\xf1\x65\xfc"
+ "\x93\x07\x9e\x35\xcc\x40\xd7\x6e"
+ "\x05\x79\x10\xa7\x1b\xb2\x49\xe0"
+ "\x54\xeb\x82\x19\x8d\x24\xbb\x2f"
+ "\xc6\x5d\xf4\x68\xff\x96\x0a\xa1"
+ "\x38\xcf\x43\xda\x71\x08\x7c\x13"
+ "\xaa\x1e\xb5\x4c\xe3\x57\xee\x85"
+ "\x1c\x90\x27\xbe\x32\xc9\x60\xf7"
+ "\x6b\x02\x99\x0d\xa4\x3b\xd2\x46"
+ "\xdd\x74\x0b\x7f\x16\xad\x21\xb8"
+ "\x4f\xe6\x5a\xf1\x88\x1f\x93\x2a"
+ "\xc1\x35\xcc\x63\xfa\x6e\x05\x9c"
+ "\x10\xa7\x3e\xd5\x49\xe0\x77\x0e"
+ "\x82\x19\xb0\x24\xbb\x52\xe9\x5d"
+ "\xf4\x8b\x22\x96\x2d\xc4\x38\xcf"
+ "\x66\xfd\x71\x08\x9f\x13\xaa\x41"
+ "\xd8\x4c\xe3\x7a\x11\x85\x1c\xb3"
+ "\x27\xbe\x55\xec\x60\xf7\x8e\x02"
+ "\x99\x30\xc7\x3b\xd2\x69\x00\x74"
+ "\x0b\xa2\x16\xad\x44\xdb\x4f\xe6"
+ "\x7d\x14\x88\x1f\xb6\x2a\xc1\x58"
+ "\xef\x63\xfa\x91\x05\x9c\x33\xca"
+ "\x3e\xd5\x6c\x03\x77\x0e\xa5\x19"
+ "\xb0\x47\xde\x52\xe9\x80\x17\x8b"
+ "\x22\xb9\x2d\xc4\x5b\xf2\x66\xfd"
+ "\x94\x08\x9f\x36\xcd\x41\xd8\x6f"
+ "\x06\x7a\x11\xa8\x1c\xb3\x4a\xe1"
+ "\x55\xec\x83\x1a\x8e\x25\xbc\x30"
+ "\xc7\x5e\xf5\x69\x00\x97\x0b\xa2"
+ "\x39\xd0\x44\xdb\x72\x09\x7d\x14"
+ "\xab\x1f\xb6\x4d\xe4\x58\xef\x86"
+ "\x1d\x91\x28\xbf\x33\xca\x61\xf8"
+ "\x6c\x03\x9a\x0e\xa5\x3c\xd3\x47"
+ "\xde\x75\x0c\x80\x17\xae\x22\xb9"
+ "\x50\xe7\x5b\xf2\x89\x20\x94\x2b"
+ "\xc2\x36\xcd\x64\xfb\x6f\x06\x9d"
+ "\x11\xa8\x3f\xd6\x4a\xe1\x78\x0f"
+ "\x83\x1a\xb1\x25\xbc\x53\xea\x5e"
+ "\xf5\x8c\x00\x97\x2e\xc5\x39\xd0"
+ "\x67\xfe\x72\x09\xa0\x14\xab\x42"
+ "\xd9\x4d\xe4\x7b\x12\x86\x1d\xb4"
+ "\x28\xbf\x56\xed\x61\xf8\x8f\x03"
+ "\x9a\x31\xc8\x3c\xd3\x6a\x01\x75"
+ "\x0c\xa3\x17\xae\x45\xdc\x50\xe7"
+ "\x7e\x15\x89\x20\xb7\x2b\xc2\x59"
+ "\xf0\x64\xfb\x92\x06\x9d\x34\xcb"
+ "\x3f\xd6\x6d\x04\x78\x0f\xa6\x1a"
+ "\xb1\x48\xdf\x53\xea\x81\x18\x8c"
+ "\x23\xba\x2e\xc5\x5c\xf3\x67\xfe"
+ "\x95\x09\xa0\x37\xce\x42\xd9\x70"
+ "\x07\x7b\x12\xa9\x1d\xb4\x4b\xe2"
+ "\x56\xed\x84\x1b\x8f\x26\xbd\x31"
+ "\xc8\x5f\xf6\x6a\x01\x98\x0c\xa3"
+ "\x3a\xd1\x45\xdc\x73\x0a\x7e\x15"
+ "\xac\x20\xb7\x4e\xe5\x59\xf0\x87"
+ "\x1e\x92\x29\xc0\x34\xcb\x62\xf9"
+ "\x6d\x04\x9b\x0f\xa6\x3d\xd4\x48"
+ "\xdf\x76\x0d\x81\x18\xaf\x23\xba"
+ "\x51\xe8\x5c\xf3\x8a\x21\x95\x2c"
+ "\xc3\x37\xce\x65\xfc\x70\x07\x9e"
+ "\x12\xa9\x40\xd7\x4b\xe2\x79\x10"
+ "\x84\x1b\xb2\x26\xbd\x54\xeb\x5f"
+ "\xf6\x8d\x01\x98\x2f\xc6\x3a\xd1"
+ "\x68\xff\x73\x0a\xa1\x15\xac\x43"
+ "\xda\x4e\xe5\x7c\x13\x87\x1e\xb5"
+ "\x29\xc0\x57\xee\x62\xf9\x90\x04"
+ "\x9b\x32\xc9\x3d\xd4\x6b\x02\x76"
+ "\x0d\xa4\x18\xaf\x46\xdd\x51\xe8"
+ "\x7f\x16\x8a\x21\xb8\x2c\xc3\x5a"
+ "\xf1\x65\xfc\x93\x07\x9e\x35\xcc"
+ "\x40\xd7\x6e\x05\x79\x10\xa7\x1b"
+ "\xb2\x49\xe0\x54\xeb\x82\x19\x8d"
+ "\x24\xbb\x2f\xc6\x5d\xf4\x68\xff"
+ "\x96\x0a\xa1\x38\xcf\x43\xda\x71"
+ "\x08\x7c\x13\xaa\x1e\xb5\x4c",
+ .psize = 1023,
+ .digest = "\xb8\xe3\x54\xed\xc5\xfc\xef\xa4"
+ "\x55\x73\x4a\x81\x99\xe4\x47\x2a"
+ "\x30\xd6\xc9\x85",
}
};
@@ -447,10 +679,17 @@ static struct hash_testvec sha1_tv_template[] = {
/*
* SHA224 test vectors from FIPS PUB 180-2
*/
-#define SHA224_TEST_VECTORS 2
+#define SHA224_TEST_VECTORS 5
static struct hash_testvec sha224_tv_template[] = {
{
+ .plaintext = "",
+ .psize = 0,
+ .digest = "\xd1\x4a\x02\x8c\x2a\x3a\x2b\xc9"
+ "\x47\x61\x02\xbb\x28\x82\x34\xc4"
+ "\x15\xa2\xb0\x1f\x82\x8e\xa6\x2a"
+ "\xc5\xb3\xe4\x2f",
+ }, {
.plaintext = "abc",
.psize = 3,
.digest = "\x23\x09\x7D\x22\x34\x05\xD8\x22"
@@ -467,16 +706,164 @@ static struct hash_testvec sha224_tv_template[] = {
"\x52\x52\x25\x25",
.np = 2,
.tap = { 28, 28 }
+ }, {
+ .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+-",
+ .psize = 64,
+ .digest = "\xc4\xdb\x2b\x3a\x58\xc3\x99\x01"
+ "\x42\xfd\x10\x92\xaa\x4e\x04\x08"
+ "\x58\xbb\xbb\xe8\xf8\x14\xa7\x0c"
+ "\xef\x3b\xcb\x0e",
+ }, {
+ .plaintext = "\x08\x9f\x13\xaa\x41\xd8\x4c\xe3"
+ "\x7a\x11\x85\x1c\xb3\x27\xbe\x55"
+ "\xec\x60\xf7\x8e\x02\x99\x30\xc7"
+ "\x3b\xd2\x69\x00\x74\x0b\xa2\x16"
+ "\xad\x44\xdb\x4f\xe6\x7d\x14\x88"
+ "\x1f\xb6\x2a\xc1\x58\xef\x63\xfa"
+ "\x91\x05\x9c\x33\xca\x3e\xd5\x6c"
+ "\x03\x77\x0e\xa5\x19\xb0\x47\xde"
+ "\x52\xe9\x80\x17\x8b\x22\xb9\x2d"
+ "\xc4\x5b\xf2\x66\xfd\x94\x08\x9f"
+ "\x36\xcd\x41\xd8\x6f\x06\x7a\x11"
+ "\xa8\x1c\xb3\x4a\xe1\x55\xec\x83"
+ "\x1a\x8e\x25\xbc\x30\xc7\x5e\xf5"
+ "\x69\x00\x97\x0b\xa2\x39\xd0\x44"
+ "\xdb\x72\x09\x7d\x14\xab\x1f\xb6"
+ "\x4d\xe4\x58\xef\x86\x1d\x91\x28"
+ "\xbf\x33\xca\x61\xf8\x6c\x03\x9a"
+ "\x0e\xa5\x3c\xd3\x47\xde\x75\x0c"
+ "\x80\x17\xae\x22\xb9\x50\xe7\x5b"
+ "\xf2\x89\x20\x94\x2b\xc2\x36\xcd"
+ "\x64\xfb\x6f\x06\x9d\x11\xa8\x3f"
+ "\xd6\x4a\xe1\x78\x0f\x83\x1a\xb1"
+ "\x25\xbc\x53\xea\x5e\xf5\x8c\x00"
+ "\x97\x2e\xc5\x39\xd0\x67\xfe\x72"
+ "\x09\xa0\x14\xab\x42\xd9\x4d\xe4"
+ "\x7b\x12\x86\x1d\xb4\x28\xbf\x56"
+ "\xed\x61\xf8\x8f\x03\x9a\x31\xc8"
+ "\x3c\xd3\x6a\x01\x75\x0c\xa3\x17"
+ "\xae\x45\xdc\x50\xe7\x7e\x15\x89"
+ "\x20\xb7\x2b\xc2\x59\xf0\x64\xfb"
+ "\x92\x06\x9d\x34\xcb\x3f\xd6\x6d"
+ "\x04\x78\x0f\xa6\x1a\xb1\x48\xdf"
+ "\x53\xea\x81\x18\x8c\x23\xba\x2e"
+ "\xc5\x5c\xf3\x67\xfe\x95\x09\xa0"
+ "\x37\xce\x42\xd9\x70\x07\x7b\x12"
+ "\xa9\x1d\xb4\x4b\xe2\x56\xed\x84"
+ "\x1b\x8f\x26\xbd\x31\xc8\x5f\xf6"
+ "\x6a\x01\x98\x0c\xa3\x3a\xd1\x45"
+ "\xdc\x73\x0a\x7e\x15\xac\x20\xb7"
+ "\x4e\xe5\x59\xf0\x87\x1e\x92\x29"
+ "\xc0\x34\xcb\x62\xf9\x6d\x04\x9b"
+ "\x0f\xa6\x3d\xd4\x48\xdf\x76\x0d"
+ "\x81\x18\xaf\x23\xba\x51\xe8\x5c"
+ "\xf3\x8a\x21\x95\x2c\xc3\x37\xce"
+ "\x65\xfc\x70\x07\x9e\x12\xa9\x40"
+ "\xd7\x4b\xe2\x79\x10\x84\x1b\xb2"
+ "\x26\xbd\x54\xeb\x5f\xf6\x8d\x01"
+ "\x98\x2f\xc6\x3a\xd1\x68\xff\x73"
+ "\x0a\xa1\x15\xac\x43\xda\x4e\xe5"
+ "\x7c\x13\x87\x1e\xb5\x29\xc0\x57"
+ "\xee\x62\xf9\x90\x04\x9b\x32\xc9"
+ "\x3d\xd4\x6b\x02\x76\x0d\xa4\x18"
+ "\xaf\x46\xdd\x51\xe8\x7f\x16\x8a"
+ "\x21\xb8\x2c\xc3\x5a\xf1\x65\xfc"
+ "\x93\x07\x9e\x35\xcc\x40\xd7\x6e"
+ "\x05\x79\x10\xa7\x1b\xb2\x49\xe0"
+ "\x54\xeb\x82\x19\x8d\x24\xbb\x2f"
+ "\xc6\x5d\xf4\x68\xff\x96\x0a\xa1"
+ "\x38\xcf\x43\xda\x71\x08\x7c\x13"
+ "\xaa\x1e\xb5\x4c\xe3\x57\xee\x85"
+ "\x1c\x90\x27\xbe\x32\xc9\x60\xf7"
+ "\x6b\x02\x99\x0d\xa4\x3b\xd2\x46"
+ "\xdd\x74\x0b\x7f\x16\xad\x21\xb8"
+ "\x4f\xe6\x5a\xf1\x88\x1f\x93\x2a"
+ "\xc1\x35\xcc\x63\xfa\x6e\x05\x9c"
+ "\x10\xa7\x3e\xd5\x49\xe0\x77\x0e"
+ "\x82\x19\xb0\x24\xbb\x52\xe9\x5d"
+ "\xf4\x8b\x22\x96\x2d\xc4\x38\xcf"
+ "\x66\xfd\x71\x08\x9f\x13\xaa\x41"
+ "\xd8\x4c\xe3\x7a\x11\x85\x1c\xb3"
+ "\x27\xbe\x55\xec\x60\xf7\x8e\x02"
+ "\x99\x30\xc7\x3b\xd2\x69\x00\x74"
+ "\x0b\xa2\x16\xad\x44\xdb\x4f\xe6"
+ "\x7d\x14\x88\x1f\xb6\x2a\xc1\x58"
+ "\xef\x63\xfa\x91\x05\x9c\x33\xca"
+ "\x3e\xd5\x6c\x03\x77\x0e\xa5\x19"
+ "\xb0\x47\xde\x52\xe9\x80\x17\x8b"
+ "\x22\xb9\x2d\xc4\x5b\xf2\x66\xfd"
+ "\x94\x08\x9f\x36\xcd\x41\xd8\x6f"
+ "\x06\x7a\x11\xa8\x1c\xb3\x4a\xe1"
+ "\x55\xec\x83\x1a\x8e\x25\xbc\x30"
+ "\xc7\x5e\xf5\x69\x00\x97\x0b\xa2"
+ "\x39\xd0\x44\xdb\x72\x09\x7d\x14"
+ "\xab\x1f\xb6\x4d\xe4\x58\xef\x86"
+ "\x1d\x91\x28\xbf\x33\xca\x61\xf8"
+ "\x6c\x03\x9a\x0e\xa5\x3c\xd3\x47"
+ "\xde\x75\x0c\x80\x17\xae\x22\xb9"
+ "\x50\xe7\x5b\xf2\x89\x20\x94\x2b"
+ "\xc2\x36\xcd\x64\xfb\x6f\x06\x9d"
+ "\x11\xa8\x3f\xd6\x4a\xe1\x78\x0f"
+ "\x83\x1a\xb1\x25\xbc\x53\xea\x5e"
+ "\xf5\x8c\x00\x97\x2e\xc5\x39\xd0"
+ "\x67\xfe\x72\x09\xa0\x14\xab\x42"
+ "\xd9\x4d\xe4\x7b\x12\x86\x1d\xb4"
+ "\x28\xbf\x56\xed\x61\xf8\x8f\x03"
+ "\x9a\x31\xc8\x3c\xd3\x6a\x01\x75"
+ "\x0c\xa3\x17\xae\x45\xdc\x50\xe7"
+ "\x7e\x15\x89\x20\xb7\x2b\xc2\x59"
+ "\xf0\x64\xfb\x92\x06\x9d\x34\xcb"
+ "\x3f\xd6\x6d\x04\x78\x0f\xa6\x1a"
+ "\xb1\x48\xdf\x53\xea\x81\x18\x8c"
+ "\x23\xba\x2e\xc5\x5c\xf3\x67\xfe"
+ "\x95\x09\xa0\x37\xce\x42\xd9\x70"
+ "\x07\x7b\x12\xa9\x1d\xb4\x4b\xe2"
+ "\x56\xed\x84\x1b\x8f\x26\xbd\x31"
+ "\xc8\x5f\xf6\x6a\x01\x98\x0c\xa3"
+ "\x3a\xd1\x45\xdc\x73\x0a\x7e\x15"
+ "\xac\x20\xb7\x4e\xe5\x59\xf0\x87"
+ "\x1e\x92\x29\xc0\x34\xcb\x62\xf9"
+ "\x6d\x04\x9b\x0f\xa6\x3d\xd4\x48"
+ "\xdf\x76\x0d\x81\x18\xaf\x23\xba"
+ "\x51\xe8\x5c\xf3\x8a\x21\x95\x2c"
+ "\xc3\x37\xce\x65\xfc\x70\x07\x9e"
+ "\x12\xa9\x40\xd7\x4b\xe2\x79\x10"
+ "\x84\x1b\xb2\x26\xbd\x54\xeb\x5f"
+ "\xf6\x8d\x01\x98\x2f\xc6\x3a\xd1"
+ "\x68\xff\x73\x0a\xa1\x15\xac\x43"
+ "\xda\x4e\xe5\x7c\x13\x87\x1e\xb5"
+ "\x29\xc0\x57\xee\x62\xf9\x90\x04"
+ "\x9b\x32\xc9\x3d\xd4\x6b\x02\x76"
+ "\x0d\xa4\x18\xaf\x46\xdd\x51\xe8"
+ "\x7f\x16\x8a\x21\xb8\x2c\xc3\x5a"
+ "\xf1\x65\xfc\x93\x07\x9e\x35\xcc"
+ "\x40\xd7\x6e\x05\x79\x10\xa7\x1b"
+ "\xb2\x49\xe0\x54\xeb\x82\x19\x8d"
+ "\x24\xbb\x2f\xc6\x5d\xf4\x68\xff"
+ "\x96\x0a\xa1\x38\xcf\x43\xda\x71"
+ "\x08\x7c\x13\xaa\x1e\xb5\x4c",
+ .psize = 1023,
+ .digest = "\x98\x43\x07\x63\x75\xe0\xa7\x1c"
+ "\x78\xb1\x8b\xfd\x04\xf5\x2d\x91"
+ "\x20\x48\xa4\x28\xff\x55\xb1\xd3"
+ "\xe6\xf9\x4f\xcc",
}
};
/*
 * SHA256 test vectors from NIST
*/
-#define SHA256_TEST_VECTORS 2
+#define SHA256_TEST_VECTORS 5
static struct hash_testvec sha256_tv_template[] = {
{
+ .plaintext = "",
+ .psize = 0,
+ .digest = "\xe3\xb0\xc4\x42\x98\xfc\x1c\x14"
+ "\x9a\xfb\xf4\xc8\x99\x6f\xb9\x24"
+ "\x27\xae\x41\xe4\x64\x9b\x93\x4c"
+ "\xa4\x95\x99\x1b\x78\x52\xb8\x55",
+ }, {
.plaintext = "abc",
.psize = 3,
.digest = "\xba\x78\x16\xbf\x8f\x01\xcf\xea"
@@ -492,16 +879,166 @@ static struct hash_testvec sha256_tv_template[] = {
"\xf6\xec\xed\xd4\x19\xdb\x06\xc1",
.np = 2,
.tap = { 28, 28 }
- },
+ }, {
+ .plaintext = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+-",
+ .psize = 64,
+ .digest = "\xb5\xfe\xad\x56\x7d\xff\xcb\xa4"
+ "\x2c\x32\x29\x32\x19\xbb\xfb\xfa"
+ "\xd6\xff\x94\xa3\x72\x91\x85\x66"
+ "\x3b\xa7\x87\x77\x58\xa3\x40\x3a",
+ }, {
+ .plaintext = "\x08\x9f\x13\xaa\x41\xd8\x4c\xe3"
+ "\x7a\x11\x85\x1c\xb3\x27\xbe\x55"
+ "\xec\x60\xf7\x8e\x02\x99\x30\xc7"
+ "\x3b\xd2\x69\x00\x74\x0b\xa2\x16"
+ "\xad\x44\xdb\x4f\xe6\x7d\x14\x88"
+ "\x1f\xb6\x2a\xc1\x58\xef\x63\xfa"
+ "\x91\x05\x9c\x33\xca\x3e\xd5\x6c"
+ "\x03\x77\x0e\xa5\x19\xb0\x47\xde"
+ "\x52\xe9\x80\x17\x8b\x22\xb9\x2d"
+ "\xc4\x5b\xf2\x66\xfd\x94\x08\x9f"
+ "\x36\xcd\x41\xd8\x6f\x06\x7a\x11"
+ "\xa8\x1c\xb3\x4a\xe1\x55\xec\x83"
+ "\x1a\x8e\x25\xbc\x30\xc7\x5e\xf5"
+ "\x69\x00\x97\x0b\xa2\x39\xd0\x44"
+ "\xdb\x72\x09\x7d\x14\xab\x1f\xb6"
+ "\x4d\xe4\x58\xef\x86\x1d\x91\x28"
+ "\xbf\x33\xca\x61\xf8\x6c\x03\x9a"
+ "\x0e\xa5\x3c\xd3\x47\xde\x75\x0c"
+ "\x80\x17\xae\x22\xb9\x50\xe7\x5b"
+ "\xf2\x89\x20\x94\x2b\xc2\x36\xcd"
+ "\x64\xfb\x6f\x06\x9d\x11\xa8\x3f"
+ "\xd6\x4a\xe1\x78\x0f\x83\x1a\xb1"
+ "\x25\xbc\x53\xea\x5e\xf5\x8c\x00"
+ "\x97\x2e\xc5\x39\xd0\x67\xfe\x72"
+ "\x09\xa0\x14\xab\x42\xd9\x4d\xe4"
+ "\x7b\x12\x86\x1d\xb4\x28\xbf\x56"
+ "\xed\x61\xf8\x8f\x03\x9a\x31\xc8"
+ "\x3c\xd3\x6a\x01\x75\x0c\xa3\x17"
+ "\xae\x45\xdc\x50\xe7\x7e\x15\x89"
+ "\x20\xb7\x2b\xc2\x59\xf0\x64\xfb"
+ "\x92\x06\x9d\x34\xcb\x3f\xd6\x6d"
+ "\x04\x78\x0f\xa6\x1a\xb1\x48\xdf"
+ "\x53\xea\x81\x18\x8c\x23\xba\x2e"
+ "\xc5\x5c\xf3\x67\xfe\x95\x09\xa0"
+ "\x37\xce\x42\xd9\x70\x07\x7b\x12"
+ "\xa9\x1d\xb4\x4b\xe2\x56\xed\x84"
+ "\x1b\x8f\x26\xbd\x31\xc8\x5f\xf6"
+ "\x6a\x01\x98\x0c\xa3\x3a\xd1\x45"
+ "\xdc\x73\x0a\x7e\x15\xac\x20\xb7"
+ "\x4e\xe5\x59\xf0\x87\x1e\x92\x29"
+ "\xc0\x34\xcb\x62\xf9\x6d\x04\x9b"
+ "\x0f\xa6\x3d\xd4\x48\xdf\x76\x0d"
+ "\x81\x18\xaf\x23\xba\x51\xe8\x5c"
+ "\xf3\x8a\x21\x95\x2c\xc3\x37\xce"
+ "\x65\xfc\x70\x07\x9e\x12\xa9\x40"
+ "\xd7\x4b\xe2\x79\x10\x84\x1b\xb2"
+ "\x26\xbd\x54\xeb\x5f\xf6\x8d\x01"
+ "\x98\x2f\xc6\x3a\xd1\x68\xff\x73"
+ "\x0a\xa1\x15\xac\x43\xda\x4e\xe5"
+ "\x7c\x13\x87\x1e\xb5\x29\xc0\x57"
+ "\xee\x62\xf9\x90\x04\x9b\x32\xc9"
+ "\x3d\xd4\x6b\x02\x76\x0d\xa4\x18"
+ "\xaf\x46\xdd\x51\xe8\x7f\x16\x8a"
+ "\x21\xb8\x2c\xc3\x5a\xf1\x65\xfc"
+ "\x93\x07\x9e\x35\xcc\x40\xd7\x6e"
+ "\x05\x79\x10\xa7\x1b\xb2\x49\xe0"
+ "\x54\xeb\x82\x19\x8d\x24\xbb\x2f"
+ "\xc6\x5d\xf4\x68\xff\x96\x0a\xa1"
+ "\x38\xcf\x43\xda\x71\x08\x7c\x13"
+ "\xaa\x1e\xb5\x4c\xe3\x57\xee\x85"
+ "\x1c\x90\x27\xbe\x32\xc9\x60\xf7"
+ "\x6b\x02\x99\x0d\xa4\x3b\xd2\x46"
+ "\xdd\x74\x0b\x7f\x16\xad\x21\xb8"
+ "\x4f\xe6\x5a\xf1\x88\x1f\x93\x2a"
+ "\xc1\x35\xcc\x63\xfa\x6e\x05\x9c"
+ "\x10\xa7\x3e\xd5\x49\xe0\x77\x0e"
+ "\x82\x19\xb0\x24\xbb\x52\xe9\x5d"
+ "\xf4\x8b\x22\x96\x2d\xc4\x38\xcf"
+ "\x66\xfd\x71\x08\x9f\x13\xaa\x41"
+ "\xd8\x4c\xe3\x7a\x11\x85\x1c\xb3"
+ "\x27\xbe\x55\xec\x60\xf7\x8e\x02"
+ "\x99\x30\xc7\x3b\xd2\x69\x00\x74"
+ "\x0b\xa2\x16\xad\x44\xdb\x4f\xe6"
+ "\x7d\x14\x88\x1f\xb6\x2a\xc1\x58"
+ "\xef\x63\xfa\x91\x05\x9c\x33\xca"
+ "\x3e\xd5\x6c\x03\x77\x0e\xa5\x19"
+ "\xb0\x47\xde\x52\xe9\x80\x17\x8b"
+ "\x22\xb9\x2d\xc4\x5b\xf2\x66\xfd"
+ "\x94\x08\x9f\x36\xcd\x41\xd8\x6f"
+ "\x06\x7a\x11\xa8\x1c\xb3\x4a\xe1"
+ "\x55\xec\x83\x1a\x8e\x25\xbc\x30"
+ "\xc7\x5e\xf5\x69\x00\x97\x0b\xa2"
+ "\x39\xd0\x44\xdb\x72\x09\x7d\x14"
+ "\xab\x1f\xb6\x4d\xe4\x58\xef\x86"
+ "\x1d\x91\x28\xbf\x33\xca\x61\xf8"
+ "\x6c\x03\x9a\x0e\xa5\x3c\xd3\x47"
+ "\xde\x75\x0c\x80\x17\xae\x22\xb9"
+ "\x50\xe7\x5b\xf2\x89\x20\x94\x2b"
+ "\xc2\x36\xcd\x64\xfb\x6f\x06\x9d"
+ "\x11\xa8\x3f\xd6\x4a\xe1\x78\x0f"
+ "\x83\x1a\xb1\x25\xbc\x53\xea\x5e"
+ "\xf5\x8c\x00\x97\x2e\xc5\x39\xd0"
+ "\x67\xfe\x72\x09\xa0\x14\xab\x42"
+ "\xd9\x4d\xe4\x7b\x12\x86\x1d\xb4"
+ "\x28\xbf\x56\xed\x61\xf8\x8f\x03"
+ "\x9a\x31\xc8\x3c\xd3\x6a\x01\x75"
+ "\x0c\xa3\x17\xae\x45\xdc\x50\xe7"
+ "\x7e\x15\x89\x20\xb7\x2b\xc2\x59"
+ "\xf0\x64\xfb\x92\x06\x9d\x34\xcb"
+ "\x3f\xd6\x6d\x04\x78\x0f\xa6\x1a"
+ "\xb1\x48\xdf\x53\xea\x81\x18\x8c"
+ "\x23\xba\x2e\xc5\x5c\xf3\x67\xfe"
+ "\x95\x09\xa0\x37\xce\x42\xd9\x70"
+ "\x07\x7b\x12\xa9\x1d\xb4\x4b\xe2"
+ "\x56\xed\x84\x1b\x8f\x26\xbd\x31"
+ "\xc8\x5f\xf6\x6a\x01\x98\x0c\xa3"
+ "\x3a\xd1\x45\xdc\x73\x0a\x7e\x15"
+ "\xac\x20\xb7\x4e\xe5\x59\xf0\x87"
+ "\x1e\x92\x29\xc0\x34\xcb\x62\xf9"
+ "\x6d\x04\x9b\x0f\xa6\x3d\xd4\x48"
+ "\xdf\x76\x0d\x81\x18\xaf\x23\xba"
+ "\x51\xe8\x5c\xf3\x8a\x21\x95\x2c"
+ "\xc3\x37\xce\x65\xfc\x70\x07\x9e"
+ "\x12\xa9\x40\xd7\x4b\xe2\x79\x10"
+ "\x84\x1b\xb2\x26\xbd\x54\xeb\x5f"
+ "\xf6\x8d\x01\x98\x2f\xc6\x3a\xd1"
+ "\x68\xff\x73\x0a\xa1\x15\xac\x43"
+ "\xda\x4e\xe5\x7c\x13\x87\x1e\xb5"
+ "\x29\xc0\x57\xee\x62\xf9\x90\x04"
+ "\x9b\x32\xc9\x3d\xd4\x6b\x02\x76"
+ "\x0d\xa4\x18\xaf\x46\xdd\x51\xe8"
+ "\x7f\x16\x8a\x21\xb8\x2c\xc3\x5a"
+ "\xf1\x65\xfc\x93\x07\x9e\x35\xcc"
+ "\x40\xd7\x6e\x05\x79\x10\xa7\x1b"
+ "\xb2\x49\xe0\x54\xeb\x82\x19\x8d"
+ "\x24\xbb\x2f\xc6\x5d\xf4\x68\xff"
+ "\x96\x0a\xa1\x38\xcf\x43\xda\x71"
+ "\x08\x7c\x13\xaa\x1e\xb5\x4c",
+ .psize = 1023,
+ .digest = "\xc5\xce\x0c\xca\x01\x4f\x53\x3a"
+ "\x32\x32\x17\xcc\xd4\x6a\x71\xa9"
+ "\xf3\xed\x50\x10\x64\x8e\x06\xbe"
+ "\x9b\x4a\xa6\xbb\x05\x89\x59\x51",
+ }
};
/*
 * SHA384 test vectors from NIST and kerneli
*/
-#define SHA384_TEST_VECTORS 4
+#define SHA384_TEST_VECTORS 6
static struct hash_testvec sha384_tv_template[] = {
{
+ .plaintext = "",
+ .psize = 0,
+ .digest = "\x38\xb0\x60\xa7\x51\xac\x96\x38"
+ "\x4c\xd9\x32\x7e\xb1\xb1\xe3\x6a"
+ "\x21\xfd\xb7\x11\x14\xbe\x07\x43"
+ "\x4c\x0c\xc7\xbf\x63\xf6\xe1\xda"
+ "\x27\x4e\xde\xbf\xe7\x6f\x65\xfb"
+ "\xd5\x1a\xd2\xf1\x48\x98\xb9\x5b",
+ }, {
.plaintext= "abc",
.psize = 3,
.digest = "\xcb\x00\x75\x3f\x45\xa3\x5e\x8b"
@@ -541,16 +1078,163 @@ static struct hash_testvec sha384_tv_template[] = {
"\xc9\x38\xe2\xd1\x99\xe8\xbe\xa4",
.np = 4,
.tap = { 26, 26, 26, 26 }
- },
+ }, {
+ .plaintext = "\x08\x9f\x13\xaa\x41\xd8\x4c\xe3"
+ "\x7a\x11\x85\x1c\xb3\x27\xbe\x55"
+ "\xec\x60\xf7\x8e\x02\x99\x30\xc7"
+ "\x3b\xd2\x69\x00\x74\x0b\xa2\x16"
+ "\xad\x44\xdb\x4f\xe6\x7d\x14\x88"
+ "\x1f\xb6\x2a\xc1\x58\xef\x63\xfa"
+ "\x91\x05\x9c\x33\xca\x3e\xd5\x6c"
+ "\x03\x77\x0e\xa5\x19\xb0\x47\xde"
+ "\x52\xe9\x80\x17\x8b\x22\xb9\x2d"
+ "\xc4\x5b\xf2\x66\xfd\x94\x08\x9f"
+ "\x36\xcd\x41\xd8\x6f\x06\x7a\x11"
+ "\xa8\x1c\xb3\x4a\xe1\x55\xec\x83"
+ "\x1a\x8e\x25\xbc\x30\xc7\x5e\xf5"
+ "\x69\x00\x97\x0b\xa2\x39\xd0\x44"
+ "\xdb\x72\x09\x7d\x14\xab\x1f\xb6"
+ "\x4d\xe4\x58\xef\x86\x1d\x91\x28"
+ "\xbf\x33\xca\x61\xf8\x6c\x03\x9a"
+ "\x0e\xa5\x3c\xd3\x47\xde\x75\x0c"
+ "\x80\x17\xae\x22\xb9\x50\xe7\x5b"
+ "\xf2\x89\x20\x94\x2b\xc2\x36\xcd"
+ "\x64\xfb\x6f\x06\x9d\x11\xa8\x3f"
+ "\xd6\x4a\xe1\x78\x0f\x83\x1a\xb1"
+ "\x25\xbc\x53\xea\x5e\xf5\x8c\x00"
+ "\x97\x2e\xc5\x39\xd0\x67\xfe\x72"
+ "\x09\xa0\x14\xab\x42\xd9\x4d\xe4"
+ "\x7b\x12\x86\x1d\xb4\x28\xbf\x56"
+ "\xed\x61\xf8\x8f\x03\x9a\x31\xc8"
+ "\x3c\xd3\x6a\x01\x75\x0c\xa3\x17"
+ "\xae\x45\xdc\x50\xe7\x7e\x15\x89"
+ "\x20\xb7\x2b\xc2\x59\xf0\x64\xfb"
+ "\x92\x06\x9d\x34\xcb\x3f\xd6\x6d"
+ "\x04\x78\x0f\xa6\x1a\xb1\x48\xdf"
+ "\x53\xea\x81\x18\x8c\x23\xba\x2e"
+ "\xc5\x5c\xf3\x67\xfe\x95\x09\xa0"
+ "\x37\xce\x42\xd9\x70\x07\x7b\x12"
+ "\xa9\x1d\xb4\x4b\xe2\x56\xed\x84"
+ "\x1b\x8f\x26\xbd\x31\xc8\x5f\xf6"
+ "\x6a\x01\x98\x0c\xa3\x3a\xd1\x45"
+ "\xdc\x73\x0a\x7e\x15\xac\x20\xb7"
+ "\x4e\xe5\x59\xf0\x87\x1e\x92\x29"
+ "\xc0\x34\xcb\x62\xf9\x6d\x04\x9b"
+ "\x0f\xa6\x3d\xd4\x48\xdf\x76\x0d"
+ "\x81\x18\xaf\x23\xba\x51\xe8\x5c"
+ "\xf3\x8a\x21\x95\x2c\xc3\x37\xce"
+ "\x65\xfc\x70\x07\x9e\x12\xa9\x40"
+ "\xd7\x4b\xe2\x79\x10\x84\x1b\xb2"
+ "\x26\xbd\x54\xeb\x5f\xf6\x8d\x01"
+ "\x98\x2f\xc6\x3a\xd1\x68\xff\x73"
+ "\x0a\xa1\x15\xac\x43\xda\x4e\xe5"
+ "\x7c\x13\x87\x1e\xb5\x29\xc0\x57"
+ "\xee\x62\xf9\x90\x04\x9b\x32\xc9"
+ "\x3d\xd4\x6b\x02\x76\x0d\xa4\x18"
+ "\xaf\x46\xdd\x51\xe8\x7f\x16\x8a"
+ "\x21\xb8\x2c\xc3\x5a\xf1\x65\xfc"
+ "\x93\x07\x9e\x35\xcc\x40\xd7\x6e"
+ "\x05\x79\x10\xa7\x1b\xb2\x49\xe0"
+ "\x54\xeb\x82\x19\x8d\x24\xbb\x2f"
+ "\xc6\x5d\xf4\x68\xff\x96\x0a\xa1"
+ "\x38\xcf\x43\xda\x71\x08\x7c\x13"
+ "\xaa\x1e\xb5\x4c\xe3\x57\xee\x85"
+ "\x1c\x90\x27\xbe\x32\xc9\x60\xf7"
+ "\x6b\x02\x99\x0d\xa4\x3b\xd2\x46"
+ "\xdd\x74\x0b\x7f\x16\xad\x21\xb8"
+ "\x4f\xe6\x5a\xf1\x88\x1f\x93\x2a"
+ "\xc1\x35\xcc\x63\xfa\x6e\x05\x9c"
+ "\x10\xa7\x3e\xd5\x49\xe0\x77\x0e"
+ "\x82\x19\xb0\x24\xbb\x52\xe9\x5d"
+ "\xf4\x8b\x22\x96\x2d\xc4\x38\xcf"
+ "\x66\xfd\x71\x08\x9f\x13\xaa\x41"
+ "\xd8\x4c\xe3\x7a\x11\x85\x1c\xb3"
+ "\x27\xbe\x55\xec\x60\xf7\x8e\x02"
+ "\x99\x30\xc7\x3b\xd2\x69\x00\x74"
+ "\x0b\xa2\x16\xad\x44\xdb\x4f\xe6"
+ "\x7d\x14\x88\x1f\xb6\x2a\xc1\x58"
+ "\xef\x63\xfa\x91\x05\x9c\x33\xca"
+ "\x3e\xd5\x6c\x03\x77\x0e\xa5\x19"
+ "\xb0\x47\xde\x52\xe9\x80\x17\x8b"
+ "\x22\xb9\x2d\xc4\x5b\xf2\x66\xfd"
+ "\x94\x08\x9f\x36\xcd\x41\xd8\x6f"
+ "\x06\x7a\x11\xa8\x1c\xb3\x4a\xe1"
+ "\x55\xec\x83\x1a\x8e\x25\xbc\x30"
+ "\xc7\x5e\xf5\x69\x00\x97\x0b\xa2"
+ "\x39\xd0\x44\xdb\x72\x09\x7d\x14"
+ "\xab\x1f\xb6\x4d\xe4\x58\xef\x86"
+ "\x1d\x91\x28\xbf\x33\xca\x61\xf8"
+ "\x6c\x03\x9a\x0e\xa5\x3c\xd3\x47"
+ "\xde\x75\x0c\x80\x17\xae\x22\xb9"
+ "\x50\xe7\x5b\xf2\x89\x20\x94\x2b"
+ "\xc2\x36\xcd\x64\xfb\x6f\x06\x9d"
+ "\x11\xa8\x3f\xd6\x4a\xe1\x78\x0f"
+ "\x83\x1a\xb1\x25\xbc\x53\xea\x5e"
+ "\xf5\x8c\x00\x97\x2e\xc5\x39\xd0"
+ "\x67\xfe\x72\x09\xa0\x14\xab\x42"
+ "\xd9\x4d\xe4\x7b\x12\x86\x1d\xb4"
+ "\x28\xbf\x56\xed\x61\xf8\x8f\x03"
+ "\x9a\x31\xc8\x3c\xd3\x6a\x01\x75"
+ "\x0c\xa3\x17\xae\x45\xdc\x50\xe7"
+ "\x7e\x15\x89\x20\xb7\x2b\xc2\x59"
+ "\xf0\x64\xfb\x92\x06\x9d\x34\xcb"
+ "\x3f\xd6\x6d\x04\x78\x0f\xa6\x1a"
+ "\xb1\x48\xdf\x53\xea\x81\x18\x8c"
+ "\x23\xba\x2e\xc5\x5c\xf3\x67\xfe"
+ "\x95\x09\xa0\x37\xce\x42\xd9\x70"
+ "\x07\x7b\x12\xa9\x1d\xb4\x4b\xe2"
+ "\x56\xed\x84\x1b\x8f\x26\xbd\x31"
+ "\xc8\x5f\xf6\x6a\x01\x98\x0c\xa3"
+ "\x3a\xd1\x45\xdc\x73\x0a\x7e\x15"
+ "\xac\x20\xb7\x4e\xe5\x59\xf0\x87"
+ "\x1e\x92\x29\xc0\x34\xcb\x62\xf9"
+ "\x6d\x04\x9b\x0f\xa6\x3d\xd4\x48"
+ "\xdf\x76\x0d\x81\x18\xaf\x23\xba"
+ "\x51\xe8\x5c\xf3\x8a\x21\x95\x2c"
+ "\xc3\x37\xce\x65\xfc\x70\x07\x9e"
+ "\x12\xa9\x40\xd7\x4b\xe2\x79\x10"
+ "\x84\x1b\xb2\x26\xbd\x54\xeb\x5f"
+ "\xf6\x8d\x01\x98\x2f\xc6\x3a\xd1"
+ "\x68\xff\x73\x0a\xa1\x15\xac\x43"
+ "\xda\x4e\xe5\x7c\x13\x87\x1e\xb5"
+ "\x29\xc0\x57\xee\x62\xf9\x90\x04"
+ "\x9b\x32\xc9\x3d\xd4\x6b\x02\x76"
+ "\x0d\xa4\x18\xaf\x46\xdd\x51\xe8"
+ "\x7f\x16\x8a\x21\xb8\x2c\xc3\x5a"
+ "\xf1\x65\xfc\x93\x07\x9e\x35\xcc"
+ "\x40\xd7\x6e\x05\x79\x10\xa7\x1b"
+ "\xb2\x49\xe0\x54\xeb\x82\x19\x8d"
+ "\x24\xbb\x2f\xc6\x5d\xf4\x68\xff"
+ "\x96\x0a\xa1\x38\xcf\x43\xda\x71"
+ "\x08\x7c\x13\xaa\x1e\xb5\x4c",
+ .psize = 1023,
+ .digest = "\x4d\x97\x23\xc8\xea\x7a\x7c\x15"
+ "\xb8\xff\x97\x9c\xf5\x13\x4f\x31"
+ "\xde\x67\xf7\x24\x73\xcd\x70\x1c"
+ "\x03\x4a\xba\x8a\x87\x49\xfe\xdc"
+ "\x75\x29\x62\x83\xae\x3f\x17\xab"
+ "\xfd\x10\x4d\x8e\x17\x1c\x1f\xca",
+ }
};
/*
 * SHA512 test vectors from NIST and kerneli
*/
-#define SHA512_TEST_VECTORS 4
+#define SHA512_TEST_VECTORS 6
static struct hash_testvec sha512_tv_template[] = {
{
+ .plaintext = "",
+ .psize = 0,
+ .digest = "\xcf\x83\xe1\x35\x7e\xef\xb8\xbd"
+ "\xf1\x54\x28\x50\xd6\x6d\x80\x07"
+ "\xd6\x20\xe4\x05\x0b\x57\x15\xdc"
+ "\x83\xf4\xa9\x21\xd3\x6c\xe9\xce"
+ "\x47\xd0\xd1\x3c\x5d\x85\xf2\xb0"
+ "\xff\x83\x18\xd2\x87\x7e\xec\x2f"
+ "\x63\xb9\x31\xbd\x47\x41\x7a\x81"
+ "\xa5\x38\x32\x7a\xf9\x27\xda\x3e",
+ }, {
.plaintext = "abc",
.psize = 3,
.digest = "\xdd\xaf\x35\xa1\x93\x61\x7a\xba"
@@ -598,7 +1282,145 @@ static struct hash_testvec sha512_tv_template[] = {
"\xed\xb4\x19\x87\x23\x28\x50\xc9",
.np = 4,
.tap = { 26, 26, 26, 26 }
- },
+ }, {
+ .plaintext = "\x08\x9f\x13\xaa\x41\xd8\x4c\xe3"
+ "\x7a\x11\x85\x1c\xb3\x27\xbe\x55"
+ "\xec\x60\xf7\x8e\x02\x99\x30\xc7"
+ "\x3b\xd2\x69\x00\x74\x0b\xa2\x16"
+ "\xad\x44\xdb\x4f\xe6\x7d\x14\x88"
+ "\x1f\xb6\x2a\xc1\x58\xef\x63\xfa"
+ "\x91\x05\x9c\x33\xca\x3e\xd5\x6c"
+ "\x03\x77\x0e\xa5\x19\xb0\x47\xde"
+ "\x52\xe9\x80\x17\x8b\x22\xb9\x2d"
+ "\xc4\x5b\xf2\x66\xfd\x94\x08\x9f"
+ "\x36\xcd\x41\xd8\x6f\x06\x7a\x11"
+ "\xa8\x1c\xb3\x4a\xe1\x55\xec\x83"
+ "\x1a\x8e\x25\xbc\x30\xc7\x5e\xf5"
+ "\x69\x00\x97\x0b\xa2\x39\xd0\x44"
+ "\xdb\x72\x09\x7d\x14\xab\x1f\xb6"
+ "\x4d\xe4\x58\xef\x86\x1d\x91\x28"
+ "\xbf\x33\xca\x61\xf8\x6c\x03\x9a"
+ "\x0e\xa5\x3c\xd3\x47\xde\x75\x0c"
+ "\x80\x17\xae\x22\xb9\x50\xe7\x5b"
+ "\xf2\x89\x20\x94\x2b\xc2\x36\xcd"
+ "\x64\xfb\x6f\x06\x9d\x11\xa8\x3f"
+ "\xd6\x4a\xe1\x78\x0f\x83\x1a\xb1"
+ "\x25\xbc\x53\xea\x5e\xf5\x8c\x00"
+ "\x97\x2e\xc5\x39\xd0\x67\xfe\x72"
+ "\x09\xa0\x14\xab\x42\xd9\x4d\xe4"
+ "\x7b\x12\x86\x1d\xb4\x28\xbf\x56"
+ "\xed\x61\xf8\x8f\x03\x9a\x31\xc8"
+ "\x3c\xd3\x6a\x01\x75\x0c\xa3\x17"
+ "\xae\x45\xdc\x50\xe7\x7e\x15\x89"
+ "\x20\xb7\x2b\xc2\x59\xf0\x64\xfb"
+ "\x92\x06\x9d\x34\xcb\x3f\xd6\x6d"
+ "\x04\x78\x0f\xa6\x1a\xb1\x48\xdf"
+ "\x53\xea\x81\x18\x8c\x23\xba\x2e"
+ "\xc5\x5c\xf3\x67\xfe\x95\x09\xa0"
+ "\x37\xce\x42\xd9\x70\x07\x7b\x12"
+ "\xa9\x1d\xb4\x4b\xe2\x56\xed\x84"
+ "\x1b\x8f\x26\xbd\x31\xc8\x5f\xf6"
+ "\x6a\x01\x98\x0c\xa3\x3a\xd1\x45"
+ "\xdc\x73\x0a\x7e\x15\xac\x20\xb7"
+ "\x4e\xe5\x59\xf0\x87\x1e\x92\x29"
+ "\xc0\x34\xcb\x62\xf9\x6d\x04\x9b"
+ "\x0f\xa6\x3d\xd4\x48\xdf\x76\x0d"
+ "\x81\x18\xaf\x23\xba\x51\xe8\x5c"
+ "\xf3\x8a\x21\x95\x2c\xc3\x37\xce"
+ "\x65\xfc\x70\x07\x9e\x12\xa9\x40"
+ "\xd7\x4b\xe2\x79\x10\x84\x1b\xb2"
+ "\x26\xbd\x54\xeb\x5f\xf6\x8d\x01"
+ "\x98\x2f\xc6\x3a\xd1\x68\xff\x73"
+ "\x0a\xa1\x15\xac\x43\xda\x4e\xe5"
+ "\x7c\x13\x87\x1e\xb5\x29\xc0\x57"
+ "\xee\x62\xf9\x90\x04\x9b\x32\xc9"
+ "\x3d\xd4\x6b\x02\x76\x0d\xa4\x18"
+ "\xaf\x46\xdd\x51\xe8\x7f\x16\x8a"
+ "\x21\xb8\x2c\xc3\x5a\xf1\x65\xfc"
+ "\x93\x07\x9e\x35\xcc\x40\xd7\x6e"
+ "\x05\x79\x10\xa7\x1b\xb2\x49\xe0"
+ "\x54\xeb\x82\x19\x8d\x24\xbb\x2f"
+ "\xc6\x5d\xf4\x68\xff\x96\x0a\xa1"
+ "\x38\xcf\x43\xda\x71\x08\x7c\x13"
+ "\xaa\x1e\xb5\x4c\xe3\x57\xee\x85"
+ "\x1c\x90\x27\xbe\x32\xc9\x60\xf7"
+ "\x6b\x02\x99\x0d\xa4\x3b\xd2\x46"
+ "\xdd\x74\x0b\x7f\x16\xad\x21\xb8"
+ "\x4f\xe6\x5a\xf1\x88\x1f\x93\x2a"
+ "\xc1\x35\xcc\x63\xfa\x6e\x05\x9c"
+ "\x10\xa7\x3e\xd5\x49\xe0\x77\x0e"
+ "\x82\x19\xb0\x24\xbb\x52\xe9\x5d"
+ "\xf4\x8b\x22\x96\x2d\xc4\x38\xcf"
+ "\x66\xfd\x71\x08\x9f\x13\xaa\x41"
+ "\xd8\x4c\xe3\x7a\x11\x85\x1c\xb3"
+ "\x27\xbe\x55\xec\x60\xf7\x8e\x02"
+ "\x99\x30\xc7\x3b\xd2\x69\x00\x74"
+ "\x0b\xa2\x16\xad\x44\xdb\x4f\xe6"
+ "\x7d\x14\x88\x1f\xb6\x2a\xc1\x58"
+ "\xef\x63\xfa\x91\x05\x9c\x33\xca"
+ "\x3e\xd5\x6c\x03\x77\x0e\xa5\x19"
+ "\xb0\x47\xde\x52\xe9\x80\x17\x8b"
+ "\x22\xb9\x2d\xc4\x5b\xf2\x66\xfd"
+ "\x94\x08\x9f\x36\xcd\x41\xd8\x6f"
+ "\x06\x7a\x11\xa8\x1c\xb3\x4a\xe1"
+ "\x55\xec\x83\x1a\x8e\x25\xbc\x30"
+ "\xc7\x5e\xf5\x69\x00\x97\x0b\xa2"
+ "\x39\xd0\x44\xdb\x72\x09\x7d\x14"
+ "\xab\x1f\xb6\x4d\xe4\x58\xef\x86"
+ "\x1d\x91\x28\xbf\x33\xca\x61\xf8"
+ "\x6c\x03\x9a\x0e\xa5\x3c\xd3\x47"
+ "\xde\x75\x0c\x80\x17\xae\x22\xb9"
+ "\x50\xe7\x5b\xf2\x89\x20\x94\x2b"
+ "\xc2\x36\xcd\x64\xfb\x6f\x06\x9d"
+ "\x11\xa8\x3f\xd6\x4a\xe1\x78\x0f"
+ "\x83\x1a\xb1\x25\xbc\x53\xea\x5e"
+ "\xf5\x8c\x00\x97\x2e\xc5\x39\xd0"
+ "\x67\xfe\x72\x09\xa0\x14\xab\x42"
+ "\xd9\x4d\xe4\x7b\x12\x86\x1d\xb4"
+ "\x28\xbf\x56\xed\x61\xf8\x8f\x03"
+ "\x9a\x31\xc8\x3c\xd3\x6a\x01\x75"
+ "\x0c\xa3\x17\xae\x45\xdc\x50\xe7"
+ "\x7e\x15\x89\x20\xb7\x2b\xc2\x59"
+ "\xf0\x64\xfb\x92\x06\x9d\x34\xcb"
+ "\x3f\xd6\x6d\x04\x78\x0f\xa6\x1a"
+ "\xb1\x48\xdf\x53\xea\x81\x18\x8c"
+ "\x23\xba\x2e\xc5\x5c\xf3\x67\xfe"
+ "\x95\x09\xa0\x37\xce\x42\xd9\x70"
+ "\x07\x7b\x12\xa9\x1d\xb4\x4b\xe2"
+ "\x56\xed\x84\x1b\x8f\x26\xbd\x31"
+ "\xc8\x5f\xf6\x6a\x01\x98\x0c\xa3"
+ "\x3a\xd1\x45\xdc\x73\x0a\x7e\x15"
+ "\xac\x20\xb7\x4e\xe5\x59\xf0\x87"
+ "\x1e\x92\x29\xc0\x34\xcb\x62\xf9"
+ "\x6d\x04\x9b\x0f\xa6\x3d\xd4\x48"
+ "\xdf\x76\x0d\x81\x18\xaf\x23\xba"
+ "\x51\xe8\x5c\xf3\x8a\x21\x95\x2c"
+ "\xc3\x37\xce\x65\xfc\x70\x07\x9e"
+ "\x12\xa9\x40\xd7\x4b\xe2\x79\x10"
+ "\x84\x1b\xb2\x26\xbd\x54\xeb\x5f"
+ "\xf6\x8d\x01\x98\x2f\xc6\x3a\xd1"
+ "\x68\xff\x73\x0a\xa1\x15\xac\x43"
+ "\xda\x4e\xe5\x7c\x13\x87\x1e\xb5"
+ "\x29\xc0\x57\xee\x62\xf9\x90\x04"
+ "\x9b\x32\xc9\x3d\xd4\x6b\x02\x76"
+ "\x0d\xa4\x18\xaf\x46\xdd\x51\xe8"
+ "\x7f\x16\x8a\x21\xb8\x2c\xc3\x5a"
+ "\xf1\x65\xfc\x93\x07\x9e\x35\xcc"
+ "\x40\xd7\x6e\x05\x79\x10\xa7\x1b"
+ "\xb2\x49\xe0\x54\xeb\x82\x19\x8d"
+ "\x24\xbb\x2f\xc6\x5d\xf4\x68\xff"
+ "\x96\x0a\xa1\x38\xcf\x43\xda\x71"
+ "\x08\x7c\x13\xaa\x1e\xb5\x4c",
+ .psize = 1023,
+ .digest = "\x76\xc9\xd4\x91\x7a\x5f\x0f\xaa"
+ "\x13\x39\xf3\x01\x7a\xfa\xe5\x41"
+ "\x5f\x0b\xf8\xeb\x32\xfc\xbf\xb0"
+ "\xfa\x8c\xcd\x17\x83\xe2\xfa\xeb"
+ "\x1c\x19\xde\xe2\x75\xdc\x34\x64"
+ "\x5f\x35\x9c\x61\x2f\x10\xf9\xec"
+ "\x59\xca\x9d\xcc\x25\x0c\x43\xba"
+ "\x85\xa8\xf8\xfe\xb5\x24\xb2\xee",
+ }
};
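
The zero-length entries added to the SHA-256, SHA-384 and SHA-512 templates above are the standard digests of the empty message. As a quick cross-check (a minimal sketch, not part of the patch, assuming only a stock Python 3 interpreter), the same values fall out of the standard-library hashlib:

import hashlib

expected = {
    "sha256": "e3b0c44298fc1c149afbf4c8996fb924"
              "27ae41e4649b934ca495991b7852b855",
    "sha384": "38b060a751ac96384cd9327eb1b1e36a"
              "21fdb71114be07434c0cc7bf63f6e1da"
              "274edebfe76f65fbd51ad2f14898b95b",
    "sha512": "cf83e1357eefb8bdf1542850d66d8007"
              "d620e4050b5715dc83f4a921d36ce9ce"
              "47d0d13c5d85f2b0ff8318d2877eec2f"
              "63b931bd47417a81a538327af927da3e",
}

for name, digest in expected.items():
    # digest of the empty message, as used by the .psize = 0 entries above
    assert hashlib.new(name, b"").hexdigest() == digest, name
print("empty-message SHA-2 digests match the new test vectors")

The long 1023-byte vectors cannot be reproduced this way without the generator used for that plaintext, so they are left as given.
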
@@ -985,6 +1807,21 @@ static struct hash_testvec tgr128_tv_template[] = {
},
};
+#define GHASH_TEST_VECTORS 1
+
+static struct hash_testvec ghash_tv_template[] =
+{
+ {
+
+ .key = "\xdf\xa6\xbf\x4d\xed\x81\xdb\x03\xff\xca\xff\x95\xf8\x30\xf0\x61",
+ .ksize = 16,
+ .plaintext = "\x95\x2b\x2a\x56\xa5\x60\x04a\xc0\xb3\x2b\x66\x56\xa0\x5b\x40\xb6",
+ .psize = 16,
+ .digest = "\xda\x53\xeb\x0a\xd2\xc5\x5b\xb6"
+ "\x4f\xc4\x80\x2c\xc3\xfe\xda\x60",
+ },
+};
+
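
ghash_tv_template exercises GHASH, the GF(2^128) universal hash that GCM builds on: the .key is the hash subkey H and, assuming ghash-generic treats the 16-byte .plaintext as a single block X, the digest is simply X * H in GF(2^128) with the reduction polynomial x^128 + x^7 + x^2 + x + 1. A minimal sketch of that multiplication (illustrative only, helper names are made up for the example):

def gf128_mul(x, y):
    # Block multiplication from the GCM spec; blocks are read big-endian,
    # so "bit 0" of the spec is the most significant bit of the integer.
    R = 0xE1 << 120
    z, v = 0, x
    for i in range(128):
        if (y >> (127 - i)) & 1:
            z ^= v
        v = (v >> 1) ^ R if v & 1 else v >> 1
    return z

def ghash(h, data):
    # data must be a whole number of 16-byte blocks
    y = 0
    for off in range(0, len(data), 16):
        y = gf128_mul(y ^ int.from_bytes(data[off:off + 16], "big"),
                      int.from_bytes(h, "big"))
    return y.to_bytes(16, "big")

h = bytes.fromhex("dfa6bf4ded81db03ffcaff95f830f061")
# "\x04a" in the C literal above is one hex escape, i.e. the single byte 0x4a
msg = bytes.fromhex("952b2a56a5604ac0b32b6656a05b40b6")
print(ghash(h, msg).hex())   # should print the .digest bytes shown above
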
/*
* HMAC-MD5 test vectors from RFC2202
* (These need to be fixed to not use strlen).
@@ -1568,6 +2405,131 @@ static struct hash_testvec hmac_sha256_tv_template[] = {
},
};
+#define CMAC_AES_TEST_VECTORS 6
+
+static struct hash_testvec aes_cmac128_tv_template[] = {
+ { /* From NIST Special Publication 800-38B, AES-128 */
+ .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6"
+ "\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
+ .plaintext = zeroed_string,
+ .digest = "\xbb\x1d\x69\x29\xe9\x59\x37\x28"
+ "\x7f\xa3\x7d\x12\x9b\x75\x67\x46",
+ .psize = 0,
+ .ksize = 16,
+ }, {
+ .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6"
+ "\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
+ .plaintext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a",
+ .digest = "\x07\x0a\x16\xb4\x6b\x4d\x41\x44"
+ "\xf7\x9b\xdd\x9d\xd0\x4a\x28\x7c",
+ .psize = 16,
+ .ksize = 16,
+ }, {
+ .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6"
+ "\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
+ .plaintext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11",
+ .digest = "\xdf\xa6\x67\x47\xde\x9a\xe6\x30"
+ "\x30\xca\x32\x61\x14\x97\xc8\x27",
+ .psize = 40,
+ .ksize = 16,
+ }, {
+ .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6"
+ "\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
+ .plaintext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .digest = "\x51\xf0\xbe\xbf\x7e\x3b\x9d\x92"
+ "\xfc\x49\x74\x17\x79\x36\x3c\xfe",
+ .psize = 64,
+ .ksize = 16,
+ }, { /* From NIST Special Publication 800-38B, AES-256 */
+ .key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe"
+ "\x2b\x73\xae\xf0\x85\x7d\x77\x81"
+ "\x1f\x35\x2c\x07\x3b\x61\x08\xd7"
+ "\x2d\x98\x10\xa3\x09\x14\xdf\xf4",
+ .plaintext = zeroed_string,
+ .digest = "\x02\x89\x62\xf6\x1b\x7b\xf8\x9e"
+ "\xfc\x6b\x55\x1f\x46\x67\xd9\x83",
+ .psize = 0,
+ .ksize = 32,
+ }, {
+ .key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe"
+ "\x2b\x73\xae\xf0\x85\x7d\x77\x81"
+ "\x1f\x35\x2c\x07\x3b\x61\x08\xd7"
+ "\x2d\x98\x10\xa3\x09\x14\xdf\xf4",
+ .plaintext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .digest = "\xe1\x99\x21\x90\x54\x9f\x6e\xd5"
+ "\x69\x6a\x2c\x05\x6c\x31\x54\x10",
+ .psize = 64,
+ .ksize = 32,
+ }
+};
+
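
The aes_cmac128_tv_template entries are the AES-128 and AES-256 examples from NIST Special Publication 800-38B. One way to reproduce the first two AES-128 entries outside the kernel is sketched below (assumption: the third-party Python "cryptography" package is installed; older releases also require an explicit backend argument to CMAC()):

from cryptography.hazmat.primitives.cmac import CMAC
from cryptography.hazmat.primitives.ciphers import algorithms

key = bytes.fromhex("2b7e151628aed2a6abf7158809cf4f3c")
cases = [
    (b"", "bb1d6929e95937287fa37d129b756746"),              # .psize = 0 entry
    (bytes.fromhex("6bc1bee22e409f96e93d7e117393172a"),
     "070a16b46b4d4144f79bdd9dd04a287c"),                   # .psize = 16 entry
]
for msg, mac in cases:
    c = CMAC(algorithms.AES(key))   # add backend=default_backend() on old versions
    c.update(msg)
    assert c.finalize().hex() == mac
print("AES-128 CMAC vectors match NIST SP 800-38B")

The same pattern, with a 24-byte TripleDES key swapped in, should cover the three-key TDEA entries in des3_ede_cmac64_tv_template below.
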
+#define CMAC_DES3_EDE_TEST_VECTORS 4
+
+static struct hash_testvec des3_ede_cmac64_tv_template[] = {
+/*
+ * From NIST Special Publication 800-38B, Three Key TDEA
+ * Corrected test vectors from:
+ * http://csrc.nist.gov/publications/nistpubs/800-38B/Updated_CMAC_Examples.pdf
+ */
+ {
+ .key = "\x8a\xa8\x3b\xf8\xcb\xda\x10\x62"
+ "\x0b\xc1\xbf\x19\xfb\xb6\xcd\x58"
+ "\xbc\x31\x3d\x4a\x37\x1c\xa8\xb5",
+ .plaintext = zeroed_string,
+ .digest = "\xb7\xa6\x88\xe1\x22\xff\xaf\x95",
+ .psize = 0,
+ .ksize = 24,
+ }, {
+ .key = "\x8a\xa8\x3b\xf8\xcb\xda\x10\x62"
+ "\x0b\xc1\xbf\x19\xfb\xb6\xcd\x58"
+ "\xbc\x31\x3d\x4a\x37\x1c\xa8\xb5",
+ .plaintext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96",
+ .digest = "\x8e\x8f\x29\x31\x36\x28\x37\x97",
+ .psize = 8,
+ .ksize = 24,
+ }, {
+ .key = "\x8a\xa8\x3b\xf8\xcb\xda\x10\x62"
+ "\x0b\xc1\xbf\x19\xfb\xb6\xcd\x58"
+ "\xbc\x31\x3d\x4a\x37\x1c\xa8\xb5",
+ .plaintext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57",
+ .digest = "\x74\x3d\xdb\xe0\xce\x2d\xc2\xed",
+ .psize = 20,
+ .ksize = 24,
+ }, {
+ .key = "\x8a\xa8\x3b\xf8\xcb\xda\x10\x62"
+ "\x0b\xc1\xbf\x19\xfb\xb6\xcd\x58"
+ "\xbc\x31\x3d\x4a\x37\x1c\xa8\xb5",
+ .plaintext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51",
+ .digest = "\x33\xe6\xb1\x09\x24\x00\xea\xe5",
+ .psize = 32,
+ .ksize = 24,
+ }
+};
+
#define XCBC_AES_TEST_VECTORS 6
static struct hash_testvec aes_xcbc128_tv_template[] = {
@@ -1636,6 +2598,109 @@ static struct hash_testvec aes_xcbc128_tv_template[] = {
}
};
+#define VMAC_AES_TEST_VECTORS 11
+static char vmac_string1[128] = {'\x01', '\x01', '\x01', '\x01',
+ '\x02', '\x03', '\x02', '\x02',
+ '\x02', '\x04', '\x01', '\x07',
+ '\x04', '\x01', '\x04', '\x03',};
+static char vmac_string2[128] = {'a', 'b', 'c',};
+static char vmac_string3[128] = {'a', 'b', 'c', 'a', 'b', 'c',
+ 'a', 'b', 'c', 'a', 'b', 'c',
+ 'a', 'b', 'c', 'a', 'b', 'c',
+ 'a', 'b', 'c', 'a', 'b', 'c',
+ 'a', 'b', 'c', 'a', 'b', 'c',
+ 'a', 'b', 'c', 'a', 'b', 'c',
+ 'a', 'b', 'c', 'a', 'b', 'c',
+ 'a', 'b', 'c', 'a', 'b', 'c',
+ };
+
+static char vmac_string4[17] = {'b', 'c', 'e', 'f',
+ 'i', 'j', 'l', 'm',
+ 'o', 'p', 'r', 's',
+ 't', 'u', 'w', 'x', 'z'};
+
+static char vmac_string5[127] = {'r', 'm', 'b', 't', 'c',
+ 'o', 'l', 'k', ']', '%',
+ '9', '2', '7', '!', 'A'};
+
+static char vmac_string6[129] = {'p', 't', '*', '7', 'l',
+ 'i', '!', '#', 'w', '0',
+ 'z', '/', '4', 'A', 'n'};
+
+static struct hash_testvec aes_vmac128_tv_template[] = {
+ {
+ .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .plaintext = NULL,
+ .digest = "\x07\x58\x80\x35\x77\xa4\x7b\x54",
+ .psize = 0,
+ .ksize = 16,
+ }, {
+ .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .plaintext = vmac_string1,
+ .digest = "\xce\xf5\x3c\xd3\xae\x68\x8c\xa1",
+ .psize = 128,
+ .ksize = 16,
+ }, {
+ .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .plaintext = vmac_string2,
+ .digest = "\xc9\x27\xb0\x73\x81\xbd\x14\x2d",
+ .psize = 128,
+ .ksize = 16,
+ }, {
+ .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .plaintext = vmac_string3,
+ .digest = "\x8d\x1a\x95\x8c\x98\x47\x0b\x19",
+ .psize = 128,
+ .ksize = 16,
+ }, {
+ .key = "abcdefghijklmnop",
+ .plaintext = NULL,
+ .digest = "\x3b\x89\xa1\x26\x9e\x55\x8f\x84",
+ .psize = 0,
+ .ksize = 16,
+ }, {
+ .key = "abcdefghijklmnop",
+ .plaintext = vmac_string1,
+ .digest = "\xab\x5e\xab\xb0\xf6\x8d\x74\xc2",
+ .psize = 128,
+ .ksize = 16,
+ }, {
+ .key = "abcdefghijklmnop",
+ .plaintext = vmac_string2,
+ .digest = "\x11\x15\x68\x42\x3d\x7b\x09\xdf",
+ .psize = 128,
+ .ksize = 16,
+ }, {
+ .key = "abcdefghijklmnop",
+ .plaintext = vmac_string3,
+ .digest = "\x8b\x32\x8f\xe1\xed\x8f\xfa\xd4",
+ .psize = 128,
+ .ksize = 16,
+ }, {
+ .key = "a09b5cd!f#07K\x00\x00\x00",
+ .plaintext = vmac_string4,
+ .digest = "\xab\xa5\x0f\xea\x42\x4e\xa1\x5f",
+ .psize = sizeof(vmac_string4),
+ .ksize = 16,
+ }, {
+ .key = "a09b5cd!f#07K\x00\x00\x00",
+ .plaintext = vmac_string5,
+ .digest = "\x25\x31\x98\xbc\x1d\xe8\x67\x60",
+ .psize = sizeof(vmac_string5),
+ .ksize = 16,
+ }, {
+ .key = "a09b5cd!f#07K\x00\x00\x00",
+ .plaintext = vmac_string6,
+ .digest = "\xc4\xae\x9b\x47\x95\x65\xeb\x41",
+ .psize = sizeof(vmac_string6),
+ .ksize = 16,
+ },
+};
+
/*
* SHA384 HMAC test vectors from RFC4231
*/
@@ -1850,14 +2915,18 @@ static struct hash_testvec hmac_sha512_tv_template[] = {
/*
* DES test vectors.
*/
-#define DES_ENC_TEST_VECTORS 10
-#define DES_DEC_TEST_VECTORS 4
-#define DES_CBC_ENC_TEST_VECTORS 5
-#define DES_CBC_DEC_TEST_VECTORS 4
-#define DES3_EDE_ENC_TEST_VECTORS 3
-#define DES3_EDE_DEC_TEST_VECTORS 3
-#define DES3_EDE_CBC_ENC_TEST_VECTORS 1
-#define DES3_EDE_CBC_DEC_TEST_VECTORS 1
+#define DES_ENC_TEST_VECTORS 11
+#define DES_DEC_TEST_VECTORS 5
+#define DES_CBC_ENC_TEST_VECTORS 6
+#define DES_CBC_DEC_TEST_VECTORS 5
+#define DES_CTR_ENC_TEST_VECTORS 2
+#define DES_CTR_DEC_TEST_VECTORS 2
+#define DES3_EDE_ENC_TEST_VECTORS 4
+#define DES3_EDE_DEC_TEST_VECTORS 4
+#define DES3_EDE_CBC_ENC_TEST_VECTORS 2
+#define DES3_EDE_CBC_DEC_TEST_VECTORS 2
+#define DES3_EDE_CTR_ENC_TEST_VECTORS 2
+#define DES3_EDE_CTR_DEC_TEST_VECTORS 2
static struct cipher_testvec des_enc_tv_template[] = {
{ /* From Applied Cryptography */
@@ -1960,6 +3029,76 @@ static struct cipher_testvec des_enc_tv_template[] = {
.rlen = 8,
.np = 8,
.tap = { 1, 1, 1, 1, 1, 1, 1, 1 }
+ }, { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55",
+ .klen = 8,
+ .input = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB",
+ .ilen = 248,
+ .result = "\x88\xCB\x1F\xAB\x2F\x2A\x49\x57"
+ "\x92\xB9\x77\xFF\x2F\x47\x58\xDD"
+ "\xD7\x8A\x91\x95\x26\x33\x78\xB2"
+ "\x33\xBA\xB2\x3E\x02\xF5\x1F\xEF"
+ "\x98\xC5\xA6\xD2\x7D\x79\xEC\xB3"
+ "\x45\xF3\x4C\x61\xAC\x6C\xC2\x55"
+ "\xE5\xD3\x06\x58\x8A\x42\x3E\xDD"
+ "\x3D\x20\x45\xE9\x6F\x0D\x25\xA8"
+ "\xA5\xC7\x69\xCE\xD5\x3B\x7B\xC9"
+ "\x9E\x65\xE7\xA3\xF2\xE4\x18\x94"
+ "\xD2\x81\xE9\x33\x2B\x2D\x49\xC4"
+ "\xFE\xDA\x7F\xE2\xF2\x8C\x9C\xDC"
+ "\x73\x58\x11\x1F\x81\xD7\x21\x1A"
+ "\x80\xD0\x0D\xE8\x45\xD6\xD8\xD5"
+ "\x2E\x51\x16\xCA\x09\x89\x54\x62"
+ "\xF7\x04\x3D\x75\xB9\xA3\x84\xF4"
+ "\x62\xF0\x02\x58\x83\xAF\x30\x87"
+ "\x85\x3F\x01\xCD\x8E\x58\x42\xC4"
+ "\x41\x73\xE0\x15\x0A\xE6\x2E\x80"
+ "\x94\xF8\x5B\x3A\x4E\xDF\x51\xB2"
+ "\x9D\xE4\xC4\x9D\xF7\x3F\xF8\x8E"
+ "\x37\x22\x4D\x00\x2A\xEF\xC1\x0F"
+ "\x14\xA0\x66\xAB\x79\x39\xD0\x8E"
+ "\xE9\x95\x61\x74\x12\xED\x07\xD7"
+ "\xDD\x95\xDC\x7B\x57\x25\x27\x9C"
+ "\x51\x96\x16\xF7\x94\x61\xB8\x87"
+ "\xF0\x21\x1B\x32\xFB\x07\x0F\x29"
+ "\x56\xBD\x9D\x22\xA2\x9F\xA2\xB9"
+ "\x46\x31\x4C\x5E\x2E\x95\x61\xEF"
+ "\xE1\x58\x39\x09\xB4\x8B\x40\xAC"
+ "\x5F\x62\xC7\x72\xD9\xFC\xCB\x9A",
+ .rlen = 248,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 248 - 8, 8 },
},
};
@@ -2000,6 +3139,76 @@ static struct cipher_testvec des_dec_tv_template[] = {
.rlen = 16,
.np = 3,
.tap = { 3, 12, 1 }
+ }, { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55",
+ .klen = 8,
+ .input = "\x88\xCB\x1F\xAB\x2F\x2A\x49\x57"
+ "\x92\xB9\x77\xFF\x2F\x47\x58\xDD"
+ "\xD7\x8A\x91\x95\x26\x33\x78\xB2"
+ "\x33\xBA\xB2\x3E\x02\xF5\x1F\xEF"
+ "\x98\xC5\xA6\xD2\x7D\x79\xEC\xB3"
+ "\x45\xF3\x4C\x61\xAC\x6C\xC2\x55"
+ "\xE5\xD3\x06\x58\x8A\x42\x3E\xDD"
+ "\x3D\x20\x45\xE9\x6F\x0D\x25\xA8"
+ "\xA5\xC7\x69\xCE\xD5\x3B\x7B\xC9"
+ "\x9E\x65\xE7\xA3\xF2\xE4\x18\x94"
+ "\xD2\x81\xE9\x33\x2B\x2D\x49\xC4"
+ "\xFE\xDA\x7F\xE2\xF2\x8C\x9C\xDC"
+ "\x73\x58\x11\x1F\x81\xD7\x21\x1A"
+ "\x80\xD0\x0D\xE8\x45\xD6\xD8\xD5"
+ "\x2E\x51\x16\xCA\x09\x89\x54\x62"
+ "\xF7\x04\x3D\x75\xB9\xA3\x84\xF4"
+ "\x62\xF0\x02\x58\x83\xAF\x30\x87"
+ "\x85\x3F\x01\xCD\x8E\x58\x42\xC4"
+ "\x41\x73\xE0\x15\x0A\xE6\x2E\x80"
+ "\x94\xF8\x5B\x3A\x4E\xDF\x51\xB2"
+ "\x9D\xE4\xC4\x9D\xF7\x3F\xF8\x8E"
+ "\x37\x22\x4D\x00\x2A\xEF\xC1\x0F"
+ "\x14\xA0\x66\xAB\x79\x39\xD0\x8E"
+ "\xE9\x95\x61\x74\x12\xED\x07\xD7"
+ "\xDD\x95\xDC\x7B\x57\x25\x27\x9C"
+ "\x51\x96\x16\xF7\x94\x61\xB8\x87"
+ "\xF0\x21\x1B\x32\xFB\x07\x0F\x29"
+ "\x56\xBD\x9D\x22\xA2\x9F\xA2\xB9"
+ "\x46\x31\x4C\x5E\x2E\x95\x61\xEF"
+ "\xE1\x58\x39\x09\xB4\x8B\x40\xAC"
+ "\x5F\x62\xC7\x72\xD9\xFC\xCB\x9A",
+ .ilen = 248,
+ .result = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB",
+ .rlen = 248,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 248 - 8, 8 },
},
};
@@ -2055,6 +3264,77 @@ static struct cipher_testvec des_cbc_enc_tv_template[] = {
.rlen = 24,
.np = 2,
.tap = { 13, 11 }
+ }, { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55",
+ .klen = 8,
+ .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47",
+ .input = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB",
+ .ilen = 248,
+ .result = "\x71\xCC\x56\x1C\x87\x2C\x43\x20"
+ "\x1C\x20\x13\x09\xF9\x2B\x40\x47"
+ "\x99\x10\xD1\x1B\x65\x33\x33\xBA"
+ "\x88\x0D\xA2\xD1\x86\xFF\x4D\xF4"
+ "\x5A\x0C\x12\x96\x32\x57\xAA\x26"
+ "\xA7\xF4\x32\x8D\xBC\x10\x31\x9E"
+ "\x81\x72\x74\xDE\x30\x19\x69\x49"
+ "\x54\x9C\xC3\xEB\x0B\x97\xDD\xD1"
+ "\xE8\x6D\x0D\x05\x83\xA5\x12\x08"
+ "\x47\xF8\x88\x03\x86\x51\x3C\xEF"
+ "\xE7\x11\x73\x4D\x44\x2B\xE2\x16"
+ "\xE8\xA5\x06\x50\x66\x70\x0E\x14"
+ "\xBA\x21\x3B\xD5\x23\x5B\xA7\x8F"
+ "\x56\xB6\xA7\x44\xDB\x86\xAB\x69"
+ "\x33\x3C\xBE\x64\xC4\x22\xD3\xFE"
+ "\x49\x90\x88\x6A\x09\x8F\x76\x59"
+ "\xCB\xB7\xA0\x2D\x79\x75\x92\x8A"
+ "\x82\x1D\xC2\xFE\x09\x1F\x78\x6B"
+ "\x2F\xD6\xA4\x87\x1E\xC4\x53\x63"
+ "\x80\x02\x61\x2F\xE3\x46\xB6\xB5"
+ "\xAA\x95\xF4\xEE\xA7\x64\x2B\x4F"
+ "\x20\xCF\xD2\x47\x4E\x39\x65\xB3"
+ "\x11\x87\xA2\x6C\x49\x7E\x36\xC7"
+ "\x62\x8B\x48\x0D\x6A\x64\x00\xBD"
+ "\x71\x91\x8C\xE9\x70\x19\x01\x4F"
+ "\x4E\x68\x23\xBA\xDA\x24\x2E\x45"
+ "\x02\x14\x33\x21\xAE\x58\x4B\xCF"
+ "\x3B\x4B\xE8\xF8\xF6\x4F\x34\x93"
+ "\xD7\x07\x8A\xD7\x18\x92\x36\x8C"
+ "\x82\xA9\xBD\x6A\x31\x91\x39\x11"
+ "\xC6\x4A\xF3\x55\xC7\x29\x2E\x63",
+ .rlen = 248,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 248 - 8, 8 },
},
};
@@ -2093,6 +3373,369 @@ static struct cipher_testvec des_cbc_dec_tv_template[] = {
.rlen = 8,
.np = 2,
.tap = { 4, 4 }
+ }, { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55",
+ .klen = 8,
+ .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47",
+ .input = "\x71\xCC\x56\x1C\x87\x2C\x43\x20"
+ "\x1C\x20\x13\x09\xF9\x2B\x40\x47"
+ "\x99\x10\xD1\x1B\x65\x33\x33\xBA"
+ "\x88\x0D\xA2\xD1\x86\xFF\x4D\xF4"
+ "\x5A\x0C\x12\x96\x32\x57\xAA\x26"
+ "\xA7\xF4\x32\x8D\xBC\x10\x31\x9E"
+ "\x81\x72\x74\xDE\x30\x19\x69\x49"
+ "\x54\x9C\xC3\xEB\x0B\x97\xDD\xD1"
+ "\xE8\x6D\x0D\x05\x83\xA5\x12\x08"
+ "\x47\xF8\x88\x03\x86\x51\x3C\xEF"
+ "\xE7\x11\x73\x4D\x44\x2B\xE2\x16"
+ "\xE8\xA5\x06\x50\x66\x70\x0E\x14"
+ "\xBA\x21\x3B\xD5\x23\x5B\xA7\x8F"
+ "\x56\xB6\xA7\x44\xDB\x86\xAB\x69"
+ "\x33\x3C\xBE\x64\xC4\x22\xD3\xFE"
+ "\x49\x90\x88\x6A\x09\x8F\x76\x59"
+ "\xCB\xB7\xA0\x2D\x79\x75\x92\x8A"
+ "\x82\x1D\xC2\xFE\x09\x1F\x78\x6B"
+ "\x2F\xD6\xA4\x87\x1E\xC4\x53\x63"
+ "\x80\x02\x61\x2F\xE3\x46\xB6\xB5"
+ "\xAA\x95\xF4\xEE\xA7\x64\x2B\x4F"
+ "\x20\xCF\xD2\x47\x4E\x39\x65\xB3"
+ "\x11\x87\xA2\x6C\x49\x7E\x36\xC7"
+ "\x62\x8B\x48\x0D\x6A\x64\x00\xBD"
+ "\x71\x91\x8C\xE9\x70\x19\x01\x4F"
+ "\x4E\x68\x23\xBA\xDA\x24\x2E\x45"
+ "\x02\x14\x33\x21\xAE\x58\x4B\xCF"
+ "\x3B\x4B\xE8\xF8\xF6\x4F\x34\x93"
+ "\xD7\x07\x8A\xD7\x18\x92\x36\x8C"
+ "\x82\xA9\xBD\x6A\x31\x91\x39\x11"
+ "\xC6\x4A\xF3\x55\xC7\x29\x2E\x63",
+ .ilen = 248,
+ .result = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB",
+ .rlen = 248,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 248 - 8, 8 },
+ },
+};
+
+static struct cipher_testvec des_ctr_enc_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55",
+ .klen = 8,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB",
+ .ilen = 248,
+ .result = "\x2F\x96\x06\x0F\x50\xC9\x68\x03"
+ "\x0F\x31\xD4\x64\xA5\x29\x77\x35"
+ "\xBC\x7A\x9F\x19\xE7\x0D\x33\x3E"
+ "\x12\x0B\x8C\xAE\x48\xAE\xD9\x02"
+ "\x0A\xD4\xB0\xD6\x37\xB2\x65\x1C"
+ "\x4B\x65\xEB\x24\xB5\x8E\xAD\x47"
+ "\x0D\xDA\x79\x77\xA0\x29\xA0\x2B"
+ "\xC8\x0F\x85\xDC\x03\x13\xA9\x04"
+ "\x19\x40\xBE\xBE\x5C\x49\x4A\x69"
+ "\xED\xE8\xE1\x9E\x14\x43\x74\xDE"
+ "\xEC\x6E\x11\x3F\x36\xEF\x7B\xFB"
+ "\xBE\x4C\x91\x43\x22\x65\x72\x48"
+ "\xE2\x12\xED\x88\xAC\xA7\xC9\x91"
+ "\x14\xA2\x36\x1C\x29\xFF\xC8\x4F"
+ "\x72\x5C\x4B\xB0\x1E\x93\xC2\xFA"
+ "\x9D\x53\x86\xA0\xAE\xC6\xB7\x3C"
+ "\x59\x0C\xD0\x8F\xA6\xD8\xA4\x31"
+ "\xB7\x30\x1C\x21\x38\xFB\x68\x8C"
+ "\x2E\xF5\x6E\x73\xC3\x16\x5F\x12"
+ "\x0C\x33\xB9\x1E\x7B\x70\xDE\x86"
+ "\x32\xB3\xC1\x16\xAB\xD9\x49\x0B"
+ "\x96\x28\x72\x6B\xF3\x30\xA9\xEB"
+ "\x69\xE2\x1E\x58\x46\xA2\x8E\xC7"
+ "\xC0\xEF\x07\xB7\x77\x2C\x00\x05"
+ "\x46\xBD\xFE\x53\x81\x8B\xA4\x03"
+ "\x20\x0F\xDB\x78\x0B\x1F\x53\x04"
+ "\x4C\x60\x4C\xC3\x2A\x86\x86\x7E"
+ "\x13\xD2\x26\xED\x5D\x3E\x9C\xF2"
+ "\x5C\xC4\x15\xC9\x9A\x21\xC5\xCD"
+ "\x19\x7F\x99\x19\x53\xCE\x1D\x14"
+ "\x69\x74\xA1\x06\x46\x0F\x4E\x75",
+ .rlen = 248,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 248 - 8, 8 },
+ }, { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55",
+ .klen = 8,
+ .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47",
+ .input = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82",
+ .ilen = 247,
+ .result = "\x62\xE5\xF4\xDC\x99\xE7\x89\xE3"
+ "\xF4\x10\xCC\x21\x99\xEB\xDC\x15"
+ "\x19\x13\x93\x27\x9D\xB6\x6F\x45"
+ "\x17\x55\x61\x72\xC8\xD3\x7F\xA5"
+ "\x32\xD0\xD3\x02\x15\xA4\x05\x23"
+ "\x9C\x23\x61\x60\x77\x7B\x6C\x95"
+ "\x26\x49\x42\x2E\xF3\xC1\x8C\x6D"
+ "\xC8\x47\xD5\x94\xE7\x53\xC8\x23"
+ "\x1B\xA5\x0B\xCB\x12\xD3\x7A\x12"
+ "\xA4\x42\x15\x34\xF7\x5F\xDC\x58"
+ "\x5B\x58\x4C\xAD\xD1\x33\x8E\xE6"
+ "\xE5\xA0\xDA\x4D\x94\x3D\x63\xA8"
+ "\x02\x82\xBB\x16\xB8\xDC\xB5\x58"
+ "\xC3\x2D\x79\xE4\x25\x79\x43\xF9"
+ "\x6D\xD3\xCA\xC0\xE8\x12\xD4\x7E"
+ "\x04\x25\x79\xFD\x27\xFB\xC4\xEA"
+ "\x32\x94\x48\x92\xF3\x68\x1A\x7F"
+ "\x36\x33\x43\x79\xF7\xCA\xC2\x38"
+ "\xC0\x68\xD4\x53\xA9\xCC\x43\x0C"
+ "\x40\x57\x3E\xED\x00\x9F\x22\x6E"
+ "\x80\x99\x0B\xCC\x40\x63\x46\x8A"
+ "\xE8\xC4\x9B\x6D\x7A\x08\x6E\xA9"
+ "\x6F\x84\xBC\xB3\xF4\x95\x0B\x2D"
+ "\x6A\xBA\x37\x50\xC3\xCF\x9F\x7C"
+ "\x59\x5E\xDE\x0B\x30\xFA\x34\x8A"
+ "\xF8\xD1\xA2\xF8\x4E\xBD\x5D\x5E"
+ "\x7D\x71\x99\xE0\xF6\xE5\x7C\xE0"
+ "\x6D\xEE\x82\x89\x92\xD4\xF5\xD7"
+ "\xDF\x85\x2D\xE1\xB2\xD6\xAB\x94"
+ "\xA5\xA6\xE7\xB0\x51\x36\x52\x37"
+ "\x91\x45\x05\x3E\x58\xBF\x32",
+ .rlen = 247,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 247 - 8, 8 },
+ },
+};
+
+static struct cipher_testvec des_ctr_dec_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55",
+ .klen = 8,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x2F\x96\x06\x0F\x50\xC9\x68\x03"
+ "\x0F\x31\xD4\x64\xA5\x29\x77\x35"
+ "\xBC\x7A\x9F\x19\xE7\x0D\x33\x3E"
+ "\x12\x0B\x8C\xAE\x48\xAE\xD9\x02"
+ "\x0A\xD4\xB0\xD6\x37\xB2\x65\x1C"
+ "\x4B\x65\xEB\x24\xB5\x8E\xAD\x47"
+ "\x0D\xDA\x79\x77\xA0\x29\xA0\x2B"
+ "\xC8\x0F\x85\xDC\x03\x13\xA9\x04"
+ "\x19\x40\xBE\xBE\x5C\x49\x4A\x69"
+ "\xED\xE8\xE1\x9E\x14\x43\x74\xDE"
+ "\xEC\x6E\x11\x3F\x36\xEF\x7B\xFB"
+ "\xBE\x4C\x91\x43\x22\x65\x72\x48"
+ "\xE2\x12\xED\x88\xAC\xA7\xC9\x91"
+ "\x14\xA2\x36\x1C\x29\xFF\xC8\x4F"
+ "\x72\x5C\x4B\xB0\x1E\x93\xC2\xFA"
+ "\x9D\x53\x86\xA0\xAE\xC6\xB7\x3C"
+ "\x59\x0C\xD0\x8F\xA6\xD8\xA4\x31"
+ "\xB7\x30\x1C\x21\x38\xFB\x68\x8C"
+ "\x2E\xF5\x6E\x73\xC3\x16\x5F\x12"
+ "\x0C\x33\xB9\x1E\x7B\x70\xDE\x86"
+ "\x32\xB3\xC1\x16\xAB\xD9\x49\x0B"
+ "\x96\x28\x72\x6B\xF3\x30\xA9\xEB"
+ "\x69\xE2\x1E\x58\x46\xA2\x8E\xC7"
+ "\xC0\xEF\x07\xB7\x77\x2C\x00\x05"
+ "\x46\xBD\xFE\x53\x81\x8B\xA4\x03"
+ "\x20\x0F\xDB\x78\x0B\x1F\x53\x04"
+ "\x4C\x60\x4C\xC3\x2A\x86\x86\x7E"
+ "\x13\xD2\x26\xED\x5D\x3E\x9C\xF2"
+ "\x5C\xC4\x15\xC9\x9A\x21\xC5\xCD"
+ "\x19\x7F\x99\x19\x53\xCE\x1D\x14"
+ "\x69\x74\xA1\x06\x46\x0F\x4E\x75",
+ .ilen = 248,
+ .result = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB",
+ .rlen = 248,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 248 - 8, 8 },
+ }, { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55",
+ .klen = 8,
+ .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47",
+ .input = "\x62\xE5\xF4\xDC\x99\xE7\x89\xE3"
+ "\xF4\x10\xCC\x21\x99\xEB\xDC\x15"
+ "\x19\x13\x93\x27\x9D\xB6\x6F\x45"
+ "\x17\x55\x61\x72\xC8\xD3\x7F\xA5"
+ "\x32\xD0\xD3\x02\x15\xA4\x05\x23"
+ "\x9C\x23\x61\x60\x77\x7B\x6C\x95"
+ "\x26\x49\x42\x2E\xF3\xC1\x8C\x6D"
+ "\xC8\x47\xD5\x94\xE7\x53\xC8\x23"
+ "\x1B\xA5\x0B\xCB\x12\xD3\x7A\x12"
+ "\xA4\x42\x15\x34\xF7\x5F\xDC\x58"
+ "\x5B\x58\x4C\xAD\xD1\x33\x8E\xE6"
+ "\xE5\xA0\xDA\x4D\x94\x3D\x63\xA8"
+ "\x02\x82\xBB\x16\xB8\xDC\xB5\x58"
+ "\xC3\x2D\x79\xE4\x25\x79\x43\xF9"
+ "\x6D\xD3\xCA\xC0\xE8\x12\xD4\x7E"
+ "\x04\x25\x79\xFD\x27\xFB\xC4\xEA"
+ "\x32\x94\x48\x92\xF3\x68\x1A\x7F"
+ "\x36\x33\x43\x79\xF7\xCA\xC2\x38"
+ "\xC0\x68\xD4\x53\xA9\xCC\x43\x0C"
+ "\x40\x57\x3E\xED\x00\x9F\x22\x6E"
+ "\x80\x99\x0B\xCC\x40\x63\x46\x8A"
+ "\xE8\xC4\x9B\x6D\x7A\x08\x6E\xA9"
+ "\x6F\x84\xBC\xB3\xF4\x95\x0B\x2D"
+ "\x6A\xBA\x37\x50\xC3\xCF\x9F\x7C"
+ "\x59\x5E\xDE\x0B\x30\xFA\x34\x8A"
+ "\xF8\xD1\xA2\xF8\x4E\xBD\x5D\x5E"
+ "\x7D\x71\x99\xE0\xF6\xE5\x7C\xE0"
+ "\x6D\xEE\x82\x89\x92\xD4\xF5\xD7"
+ "\xDF\x85\x2D\xE1\xB2\xD6\xAB\x94"
+ "\xA5\xA6\xE7\xB0\x51\x36\x52\x37"
+ "\x91\x45\x05\x3E\x58\xBF\x32",
+ .ilen = 247,
+ .result = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82",
+ .rlen = 247,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 247 - 8, 8 },
},
};
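
The new ctr(des) vectors above make two points: the first starts the counter at FF..FD so that the increment wraps around, and the second uses a 247-byte buffer to show that CTR needs no padding, the final keystream block is simply truncated. Decryption is the same operation as encryption. A generic sketch of that behaviour follows (the block cipher is passed in as a callable and is purely a placeholder, not a DES implementation):

def ctr_xcrypt(encrypt_block, block_size, iv, data):
    # Works for both directions: CTR encryption and decryption are identical.
    out = bytearray()
    counter = int.from_bytes(iv, "big")
    mask = (1 << (8 * block_size)) - 1          # lets the counter wrap around
    for off in range(0, len(data), block_size):
        keystream = encrypt_block((counter & mask).to_bytes(block_size, "big"))
        chunk = data[off:off + block_size]
        # zip() stops at the shorter sequence, so a partial final block
        # (e.g. the 247-byte vector) simply truncates the keystream.
        out += bytes(a ^ b for a, b in zip(chunk, keystream))
        counter += 1   # big-endian increment over the whole counter block
    return bytes(out)
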
@@ -2124,6 +3767,140 @@ static struct cipher_testvec des3_ede_enc_tv_template[] = {
.ilen = 8,
.result = "\xe1\xef\x62\xc3\x32\xfe\x82\x5b",
.rlen = 8,
+ }, { /* Generated with Crypto++ */
+ .key = "\xF3\x9C\xD6\xF3\x9C\xB9\x5A\x67"
+ "\x00\x5A\x67\x00\x2D\xCE\xEB\x2D"
+ "\xCE\xEB\xB4\x51\x72\xB4\x51\x72",
+ .klen = 24,
+ .input = "\x05\xEC\x77\xFB\x42\xD5\x59\x20"
+ "\x8B\x12\x86\x69\xF0\x5B\xCF\x56"
+ "\x39\xAD\x34\x9F\x66\xEA\x7D\xC4"
+ "\x48\xD3\xBA\x0D\xB1\x18\xE3\x4A"
+ "\xFE\x41\x28\x5C\x27\x8E\x11\x85"
+ "\x6C\xF7\x5E\xC2\x55\x3C\xA0\x0B"
+ "\x92\x65\xE9\x70\xDB\x4F\xD6\xB9"
+ "\x00\xB4\x1F\xE6\x49\xFD\x44\x2F"
+ "\x53\x3A\x8D\x14\x98\x63\xCA\x5D"
+ "\xC1\xA8\x33\xA7\x0E\x91\x78\xEC"
+ "\x77\xDE\x42\xD5\xBC\x07\x8B\x12"
+ "\xE5\x4C\xF0\x5B\x22\x56\x39\x80"
+ "\x6B\x9F\x66\xC9\x50\xC4\xAF\x36"
+ "\xBA\x0D\x94\x7F\xE3\x4A\xDD\x41"
+ "\x28\xB3\x1A\x8E\x11\xF8\x43\xF7"
+ "\x5E\x21\x55\x3C\x87\x6E\x92\x65"
+ "\xCC\x57\xDB\xA2\x35\xB9\x00\xEB"
+ "\x72\xE6\x49\xD0\x44\x2F\xB6\x19"
+ "\x8D\x14\xFF\x46\xCA\x5D\x24\xA8"
+ "\x33\x9A\x6D\x91\x78\xC3\x77\xDE"
+ "\xA1\x08\xBC\x07\xEE\x71\xE5\x4C"
+ "\xD7\x5B\x22\xB5\x1C\x80\x6B\xF2"
+ "\x45\xC9\x50\x3B\xAF\x36\x99\x60"
+ "\x94\x7F\xC6\x4A\xDD\xA4\x0F\xB3"
+ "\x1A\xED\x74\xF8\x43\x2A\x5E\x21"
+ "\x88\x13\x87\x6E\xF1\x58\xCC\x57"
+ "\x3E\xA2\x35\x9C\x67\xEB\x72\xC5"
+ "\x49\xD0\xBB\x02\xB6\x19\xE0\x4B"
+ "\xFF\x46\x29\x5D\x24\x8F\x16\x9A"
+ "\x6D\xF4\x5F\xC3\xAA\x3D\xA1\x08"
+ "\x93\x7A\xEE\x71\xD8\x4C\xD7\xBE"
+ "\x01\xB5\x1C\xE7\x4E\xF2\x45\x2C"
+ "\x50\x3B\x82\x15\x99\x60\xCB\x52"
+ "\xC6\xA9\x30\xA4\x0F\x96\x79\xED"
+ "\x74\xDF\x43\x2A\xBD\x04\x88\x13"
+ "\xFA\x4D\xF1\x58\x23\x57\x3E\x81"
+ "\x68\x9C\x67\xCE\x51\xC5\xAC\x37"
+ "\xBB\x02\x95\x7C\xE0\x4B\xD2\x46"
+ "\x29\xB0\x1B\x8F\x16\xF9\x40\xF4"
+ "\x5F\x26\xAA\x3D\x84\x6F\x93\x7A"
+ "\xCD\x54\xD8\xA3\x0A\xBE\x01\xE8"
+ "\x73\xE7\x4E\xD1\x45\x2C\xB7\x1E"
+ "\x82\x15\xFC\x47\xCB\x52\x25\xA9"
+ "\x30\x9B\x62\x96\x79\xC0\x74\xDF"
+ "\xA6\x09\xBD\x04\xEF\x76\xFA\x4D"
+ "\xD4\x58\x23\x8A\x1D\x81\x68\xF3"
+ "\x5A\xCE\x51\x38\xAC\x37\x9E\x61"
+ "\x95\x7C\xC7\x4B\xD2\xA5\x0C\xB0"
+ "\x1B\xE2\x75\xF9\x40\x2B\x5F\x26"
+ "\x89\x10\x84\x6F\xF6\x59\xCD\x54"
+ "\x3F\xA3\x0A\x9D\x64\xE8\x73\xDA"
+ "\x4E\xD1\xB8\x03\xB7\x1E\xE1\x48"
+ "\xFC\x47\x2E\x52\x25\x8C\x17\x9B"
+ "\x62\xF5\x5C\xC0\xAB\x32\xA6\x09"
+ "\x90\x7B\xEF\x76\xD9\x4D\xD4\xBF"
+ "\x06\x8A\x1D\xE4\x4F\xF3\x5A\x2D"
+ "\x51\x38\x83\x6A\x9E\x61\xC8\x53"
+ "\xC7\xAE\x31\xA5\x0C\x97\x7E\xE2"
+ "\x75\xDC\x40\x2B\xB2\x05\x89\x10"
+ "\xFB\x42\xF6\x59\x20\x54\x3F\x86"
+ "\x69\x9D\x64\xCF\x56\xDA\xAD\x34"
+ "\xB8\x03\xEA\x7D\xE1\x48\xD3\x47",
+ .ilen = 496,
+ .result = "\x4E\x9A\x40\x3D\x61\x7D\x17\xFA"
+ "\x16\x86\x88\x0B\xD8\xAE\xF8\xE4"
+ "\x81\x01\x04\x00\x76\xFA\xED\xD3"
+ "\x44\x7E\x21\x9D\xF0\xFB\x2B\x64"
+ "\xCA\x4E\x90\xE0\xC0\x63\x28\x92"
+ "\xF3\x1F\xA4\x53\x2C\x77\xCC\x77"
+ "\x69\x56\xD0\x19\xAD\x00\x2D\x97"
+ "\xBC\xDE\x49\x6A\x82\xBC\x16\xE2"
+ "\x2F\x3E\x72\xEE\xD1\xCE\xFC\x1B"
+ "\xEA\x32\x56\xE4\x0B\xAF\x27\x36"
+ "\xAF\x08\xB9\x61\xB7\x48\x23\x27"
+ "\xEE\x4D\xC8\x79\x56\x06\xEB\xC7"
+ "\x5B\xCA\x0A\xC6\x5E\x5C\xCB\xB6"
+ "\x9D\xDA\x04\x59\xE2\x09\x48\x7E"
+ "\x6B\x37\xC6\xFE\x92\xA9\x1E\x6E"
+ "\x0D\x19\xFA\x33\x0F\xEE\x36\x68"
+ "\x11\xBB\xF9\x5A\x73\xAB\x3A\xEA"
+ "\xAC\x28\xD8\xD5\x27\xE8\x6B\x16"
+ "\x45\x86\x50\x01\x70\x35\x99\x92"
+ "\xDF\x0C\x07\x88\x8B\x7F\x9E\x4B"
+ "\xD2\x04\x84\x90\xC4\x27\xDF\x0A"
+ "\x49\xA8\xA7\x1A\x6D\x78\x16\xCA"
+ "\xB3\x18\x5C\xC3\x93\x63\x5A\x68"
+ "\x77\x02\xBA\xED\x62\x71\xB1\xD9"
+ "\x5E\xE5\x6F\x1A\xCC\x1D\xBE\x2E"
+ "\x11\xF3\xA6\x97\xCA\x8E\xBF\xB4"
+ "\x56\xA1\x36\x6B\xB1\x0A\x3E\x70"
+ "\xEA\xD7\xCD\x72\x7B\x79\xC8\xAD"
+ "\x6B\xFE\xFB\xBA\x64\xAE\x19\xC1"
+ "\x82\xCF\x8A\xA1\x50\x17\x7F\xB2"
+ "\x6F\x7B\x0F\x52\xC5\x3E\x4A\x52"
+ "\x3F\xD9\x3F\x01\xA6\x41\x1A\xB3"
+ "\xB3\x7A\x0E\x8E\x75\xB2\xB1\x5F"
+ "\xDB\xEA\x84\x13\x26\x6C\x85\x4E"
+ "\xAE\x6B\xDC\xE7\xE7\xAD\xB0\x06"
+ "\x5C\xBA\x92\xD0\x30\xBB\x8D\xD2"
+ "\xAE\x4C\x70\x85\xA0\x07\xE3\x2C"
+ "\xD1\x27\x9C\xCF\xDB\x13\xB7\xE5"
+ "\xF9\x6A\x02\xD0\x39\x9D\xB6\xE7"
+ "\xD1\x17\x25\x08\xF9\xA9\xA6\x67"
+ "\x38\x80\xD1\x22\xAB\x1A\xD7\x26"
+ "\xAD\xCA\x19\x1B\xFA\x18\xA7\x57"
+ "\x31\xEC\xC9\xED\xDB\x79\xC0\x48"
+ "\xAC\x31\x9F\x03\x8B\x62\x5B\x7E"
+ "\x0E\xA6\xD0\x64\xEE\xEA\x00\xFC"
+ "\x58\xC8\xDE\x51\x4E\x17\x15\x11"
+ "\x66\x58\xB6\x90\xDC\xDF\xA1\x49"
+ "\xCA\x79\xE9\x31\x31\x42\xDC\x56"
+ "\x0B\xCD\xB6\x0D\xC7\x64\xF7\x19"
+ "\xD9\x42\x05\x7F\xBC\x2F\xFC\x90"
+ "\xAE\x29\x86\xAA\x43\x7A\x4F\x6B"
+ "\xCE\xEA\xBC\x31\x8D\x65\x9D\x46"
+ "\xEA\x77\xB4\xF9\x58\xEA\x5D\x84"
+ "\xE4\xDC\x14\xBB\xBD\x15\x0E\xDA"
+ "\xD8\xE4\xA4\x5D\x61\xF9\x58\x0F"
+ "\xE4\x82\x77\xCE\x87\xC0\x09\xF0"
+ "\xD6\x10\x9E\x34\xE1\x0C\x67\x55"
+ "\x7B\x6D\xD5\x51\x4B\x00\xEE\xBA"
+ "\xF2\x7B\xBE\x75\x07\x42\x9D\x99"
+ "\x12\xE1\x71\x4A\xF9\x2A\xF5\xF6"
+ "\x93\x03\xD7\x51\x09\xFA\xBE\x68"
+ "\xD8\x45\xFF\x33\xBA\xBB\x2B\x63",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -2155,6 +3932,140 @@ static struct cipher_testvec des3_ede_dec_tv_template[] = {
.ilen = 8,
.result = "\x00\x00\x00\x00\x00\x00\x00\x00",
.rlen = 8,
+ }, { /* Generated with Crypto++ */
+ .key = "\xF3\x9C\xD6\xF3\x9C\xB9\x5A\x67"
+ "\x00\x5A\x67\x00\x2D\xCE\xEB\x2D"
+ "\xCE\xEB\xB4\x51\x72\xB4\x51\x72",
+ .klen = 24,
+ .input = "\x4E\x9A\x40\x3D\x61\x7D\x17\xFA"
+ "\x16\x86\x88\x0B\xD8\xAE\xF8\xE4"
+ "\x81\x01\x04\x00\x76\xFA\xED\xD3"
+ "\x44\x7E\x21\x9D\xF0\xFB\x2B\x64"
+ "\xCA\x4E\x90\xE0\xC0\x63\x28\x92"
+ "\xF3\x1F\xA4\x53\x2C\x77\xCC\x77"
+ "\x69\x56\xD0\x19\xAD\x00\x2D\x97"
+ "\xBC\xDE\x49\x6A\x82\xBC\x16\xE2"
+ "\x2F\x3E\x72\xEE\xD1\xCE\xFC\x1B"
+ "\xEA\x32\x56\xE4\x0B\xAF\x27\x36"
+ "\xAF\x08\xB9\x61\xB7\x48\x23\x27"
+ "\xEE\x4D\xC8\x79\x56\x06\xEB\xC7"
+ "\x5B\xCA\x0A\xC6\x5E\x5C\xCB\xB6"
+ "\x9D\xDA\x04\x59\xE2\x09\x48\x7E"
+ "\x6B\x37\xC6\xFE\x92\xA9\x1E\x6E"
+ "\x0D\x19\xFA\x33\x0F\xEE\x36\x68"
+ "\x11\xBB\xF9\x5A\x73\xAB\x3A\xEA"
+ "\xAC\x28\xD8\xD5\x27\xE8\x6B\x16"
+ "\x45\x86\x50\x01\x70\x35\x99\x92"
+ "\xDF\x0C\x07\x88\x8B\x7F\x9E\x4B"
+ "\xD2\x04\x84\x90\xC4\x27\xDF\x0A"
+ "\x49\xA8\xA7\x1A\x6D\x78\x16\xCA"
+ "\xB3\x18\x5C\xC3\x93\x63\x5A\x68"
+ "\x77\x02\xBA\xED\x62\x71\xB1\xD9"
+ "\x5E\xE5\x6F\x1A\xCC\x1D\xBE\x2E"
+ "\x11\xF3\xA6\x97\xCA\x8E\xBF\xB4"
+ "\x56\xA1\x36\x6B\xB1\x0A\x3E\x70"
+ "\xEA\xD7\xCD\x72\x7B\x79\xC8\xAD"
+ "\x6B\xFE\xFB\xBA\x64\xAE\x19\xC1"
+ "\x82\xCF\x8A\xA1\x50\x17\x7F\xB2"
+ "\x6F\x7B\x0F\x52\xC5\x3E\x4A\x52"
+ "\x3F\xD9\x3F\x01\xA6\x41\x1A\xB3"
+ "\xB3\x7A\x0E\x8E\x75\xB2\xB1\x5F"
+ "\xDB\xEA\x84\x13\x26\x6C\x85\x4E"
+ "\xAE\x6B\xDC\xE7\xE7\xAD\xB0\x06"
+ "\x5C\xBA\x92\xD0\x30\xBB\x8D\xD2"
+ "\xAE\x4C\x70\x85\xA0\x07\xE3\x2C"
+ "\xD1\x27\x9C\xCF\xDB\x13\xB7\xE5"
+ "\xF9\x6A\x02\xD0\x39\x9D\xB6\xE7"
+ "\xD1\x17\x25\x08\xF9\xA9\xA6\x67"
+ "\x38\x80\xD1\x22\xAB\x1A\xD7\x26"
+ "\xAD\xCA\x19\x1B\xFA\x18\xA7\x57"
+ "\x31\xEC\xC9\xED\xDB\x79\xC0\x48"
+ "\xAC\x31\x9F\x03\x8B\x62\x5B\x7E"
+ "\x0E\xA6\xD0\x64\xEE\xEA\x00\xFC"
+ "\x58\xC8\xDE\x51\x4E\x17\x15\x11"
+ "\x66\x58\xB6\x90\xDC\xDF\xA1\x49"
+ "\xCA\x79\xE9\x31\x31\x42\xDC\x56"
+ "\x0B\xCD\xB6\x0D\xC7\x64\xF7\x19"
+ "\xD9\x42\x05\x7F\xBC\x2F\xFC\x90"
+ "\xAE\x29\x86\xAA\x43\x7A\x4F\x6B"
+ "\xCE\xEA\xBC\x31\x8D\x65\x9D\x46"
+ "\xEA\x77\xB4\xF9\x58\xEA\x5D\x84"
+ "\xE4\xDC\x14\xBB\xBD\x15\x0E\xDA"
+ "\xD8\xE4\xA4\x5D\x61\xF9\x58\x0F"
+ "\xE4\x82\x77\xCE\x87\xC0\x09\xF0"
+ "\xD6\x10\x9E\x34\xE1\x0C\x67\x55"
+ "\x7B\x6D\xD5\x51\x4B\x00\xEE\xBA"
+ "\xF2\x7B\xBE\x75\x07\x42\x9D\x99"
+ "\x12\xE1\x71\x4A\xF9\x2A\xF5\xF6"
+ "\x93\x03\xD7\x51\x09\xFA\xBE\x68"
+ "\xD8\x45\xFF\x33\xBA\xBB\x2B\x63",
+ .ilen = 496,
+ .result = "\x05\xEC\x77\xFB\x42\xD5\x59\x20"
+ "\x8B\x12\x86\x69\xF0\x5B\xCF\x56"
+ "\x39\xAD\x34\x9F\x66\xEA\x7D\xC4"
+ "\x48\xD3\xBA\x0D\xB1\x18\xE3\x4A"
+ "\xFE\x41\x28\x5C\x27\x8E\x11\x85"
+ "\x6C\xF7\x5E\xC2\x55\x3C\xA0\x0B"
+ "\x92\x65\xE9\x70\xDB\x4F\xD6\xB9"
+ "\x00\xB4\x1F\xE6\x49\xFD\x44\x2F"
+ "\x53\x3A\x8D\x14\x98\x63\xCA\x5D"
+ "\xC1\xA8\x33\xA7\x0E\x91\x78\xEC"
+ "\x77\xDE\x42\xD5\xBC\x07\x8B\x12"
+ "\xE5\x4C\xF0\x5B\x22\x56\x39\x80"
+ "\x6B\x9F\x66\xC9\x50\xC4\xAF\x36"
+ "\xBA\x0D\x94\x7F\xE3\x4A\xDD\x41"
+ "\x28\xB3\x1A\x8E\x11\xF8\x43\xF7"
+ "\x5E\x21\x55\x3C\x87\x6E\x92\x65"
+ "\xCC\x57\xDB\xA2\x35\xB9\x00\xEB"
+ "\x72\xE6\x49\xD0\x44\x2F\xB6\x19"
+ "\x8D\x14\xFF\x46\xCA\x5D\x24\xA8"
+ "\x33\x9A\x6D\x91\x78\xC3\x77\xDE"
+ "\xA1\x08\xBC\x07\xEE\x71\xE5\x4C"
+ "\xD7\x5B\x22\xB5\x1C\x80\x6B\xF2"
+ "\x45\xC9\x50\x3B\xAF\x36\x99\x60"
+ "\x94\x7F\xC6\x4A\xDD\xA4\x0F\xB3"
+ "\x1A\xED\x74\xF8\x43\x2A\x5E\x21"
+ "\x88\x13\x87\x6E\xF1\x58\xCC\x57"
+ "\x3E\xA2\x35\x9C\x67\xEB\x72\xC5"
+ "\x49\xD0\xBB\x02\xB6\x19\xE0\x4B"
+ "\xFF\x46\x29\x5D\x24\x8F\x16\x9A"
+ "\x6D\xF4\x5F\xC3\xAA\x3D\xA1\x08"
+ "\x93\x7A\xEE\x71\xD8\x4C\xD7\xBE"
+ "\x01\xB5\x1C\xE7\x4E\xF2\x45\x2C"
+ "\x50\x3B\x82\x15\x99\x60\xCB\x52"
+ "\xC6\xA9\x30\xA4\x0F\x96\x79\xED"
+ "\x74\xDF\x43\x2A\xBD\x04\x88\x13"
+ "\xFA\x4D\xF1\x58\x23\x57\x3E\x81"
+ "\x68\x9C\x67\xCE\x51\xC5\xAC\x37"
+ "\xBB\x02\x95\x7C\xE0\x4B\xD2\x46"
+ "\x29\xB0\x1B\x8F\x16\xF9\x40\xF4"
+ "\x5F\x26\xAA\x3D\x84\x6F\x93\x7A"
+ "\xCD\x54\xD8\xA3\x0A\xBE\x01\xE8"
+ "\x73\xE7\x4E\xD1\x45\x2C\xB7\x1E"
+ "\x82\x15\xFC\x47\xCB\x52\x25\xA9"
+ "\x30\x9B\x62\x96\x79\xC0\x74\xDF"
+ "\xA6\x09\xBD\x04\xEF\x76\xFA\x4D"
+ "\xD4\x58\x23\x8A\x1D\x81\x68\xF3"
+ "\x5A\xCE\x51\x38\xAC\x37\x9E\x61"
+ "\x95\x7C\xC7\x4B\xD2\xA5\x0C\xB0"
+ "\x1B\xE2\x75\xF9\x40\x2B\x5F\x26"
+ "\x89\x10\x84\x6F\xF6\x59\xCD\x54"
+ "\x3F\xA3\x0A\x9D\x64\xE8\x73\xDA"
+ "\x4E\xD1\xB8\x03\xB7\x1E\xE1\x48"
+ "\xFC\x47\x2E\x52\x25\x8C\x17\x9B"
+ "\x62\xF5\x5C\xC0\xAB\x32\xA6\x09"
+ "\x90\x7B\xEF\x76\xD9\x4D\xD4\xBF"
+ "\x06\x8A\x1D\xE4\x4F\xF3\x5A\x2D"
+ "\x51\x38\x83\x6A\x9E\x61\xC8\x53"
+ "\xC7\xAE\x31\xA5\x0C\x97\x7E\xE2"
+ "\x75\xDC\x40\x2B\xB2\x05\x89\x10"
+ "\xFB\x42\xF6\x59\x20\x54\x3F\x86"
+ "\x69\x9D\x64\xCF\x56\xDA\xAD\x34"
+ "\xB8\x03\xEA\x7D\xE1\x48\xD3\x47",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
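The trailing .also_non_np / .np / .tap fields in the vectors above follow the usual testmgr convention: .np and .tap describe how the buffer is split across scatterlist entries when a vector is exercised in chunked form, and .also_non_np requests an additional unsplit run. A minimal sketch of that interpretation, with a helper name and parameter types of our own choosing rather than the real testmgr internals:

#include <linux/scatterlist.h>

/*
 * Illustrative only: lay a test buffer out over np scatterlist entries of
 * tap[i] bytes each, e.g. .tap = { 496 - 16, 16 } with .np = 2 places the
 * last 16 bytes of the 496-byte buffer in a second chunk.
 */
static void testvec_map_sg(struct scatterlist *sg, const char *buf,
			   const unsigned int *tap, unsigned int np)
{
	unsigned int i, offset = 0;

	sg_init_table(sg, np);
	for (i = 0; i < np; i++) {
		sg_set_buf(&sg[i], buf + offset, tap[i]);
		offset += tap[i];
	}
}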
@@ -2199,6 +4110,142 @@ static struct cipher_testvec des3_ede_cbc_enc_tv_template[] = {
"\x9d\xde\xa5\x70\xe9\x42\x45\x8a"
"\x6b\xfa\xb1\x91\x13\xb0\xd9\x19",
.rlen = 128,
+ }, { /* Generated with Crypto++ */
+ .key = "\x9C\xD6\xF3\x9C\xB9\x5A\x67\x00"
+ "\x5A\x67\x00\x2D\xCE\xEB\x2D\xCE"
+ "\xEB\xB4\x51\x72\xB4\x51\x72\x1F",
+ .klen = 24,
+ .iv = "\xB2\xD7\x48\xED\x06\x44\xF9\x12"
+ "\xB7\x28\x4D\x83\x24\x59\xF2\x17",
+ .input = "\x05\xEC\x77\xFB\x42\xD5\x59\x20"
+ "\x8B\x12\x86\x69\xF0\x5B\xCF\x56"
+ "\x39\xAD\x34\x9F\x66\xEA\x7D\xC4"
+ "\x48\xD3\xBA\x0D\xB1\x18\xE3\x4A"
+ "\xFE\x41\x28\x5C\x27\x8E\x11\x85"
+ "\x6C\xF7\x5E\xC2\x55\x3C\xA0\x0B"
+ "\x92\x65\xE9\x70\xDB\x4F\xD6\xB9"
+ "\x00\xB4\x1F\xE6\x49\xFD\x44\x2F"
+ "\x53\x3A\x8D\x14\x98\x63\xCA\x5D"
+ "\xC1\xA8\x33\xA7\x0E\x91\x78\xEC"
+ "\x77\xDE\x42\xD5\xBC\x07\x8B\x12"
+ "\xE5\x4C\xF0\x5B\x22\x56\x39\x80"
+ "\x6B\x9F\x66\xC9\x50\xC4\xAF\x36"
+ "\xBA\x0D\x94\x7F\xE3\x4A\xDD\x41"
+ "\x28\xB3\x1A\x8E\x11\xF8\x43\xF7"
+ "\x5E\x21\x55\x3C\x87\x6E\x92\x65"
+ "\xCC\x57\xDB\xA2\x35\xB9\x00\xEB"
+ "\x72\xE6\x49\xD0\x44\x2F\xB6\x19"
+ "\x8D\x14\xFF\x46\xCA\x5D\x24\xA8"
+ "\x33\x9A\x6D\x91\x78\xC3\x77\xDE"
+ "\xA1\x08\xBC\x07\xEE\x71\xE5\x4C"
+ "\xD7\x5B\x22\xB5\x1C\x80\x6B\xF2"
+ "\x45\xC9\x50\x3B\xAF\x36\x99\x60"
+ "\x94\x7F\xC6\x4A\xDD\xA4\x0F\xB3"
+ "\x1A\xED\x74\xF8\x43\x2A\x5E\x21"
+ "\x88\x13\x87\x6E\xF1\x58\xCC\x57"
+ "\x3E\xA2\x35\x9C\x67\xEB\x72\xC5"
+ "\x49\xD0\xBB\x02\xB6\x19\xE0\x4B"
+ "\xFF\x46\x29\x5D\x24\x8F\x16\x9A"
+ "\x6D\xF4\x5F\xC3\xAA\x3D\xA1\x08"
+ "\x93\x7A\xEE\x71\xD8\x4C\xD7\xBE"
+ "\x01\xB5\x1C\xE7\x4E\xF2\x45\x2C"
+ "\x50\x3B\x82\x15\x99\x60\xCB\x52"
+ "\xC6\xA9\x30\xA4\x0F\x96\x79\xED"
+ "\x74\xDF\x43\x2A\xBD\x04\x88\x13"
+ "\xFA\x4D\xF1\x58\x23\x57\x3E\x81"
+ "\x68\x9C\x67\xCE\x51\xC5\xAC\x37"
+ "\xBB\x02\x95\x7C\xE0\x4B\xD2\x46"
+ "\x29\xB0\x1B\x8F\x16\xF9\x40\xF4"
+ "\x5F\x26\xAA\x3D\x84\x6F\x93\x7A"
+ "\xCD\x54\xD8\xA3\x0A\xBE\x01\xE8"
+ "\x73\xE7\x4E\xD1\x45\x2C\xB7\x1E"
+ "\x82\x15\xFC\x47\xCB\x52\x25\xA9"
+ "\x30\x9B\x62\x96\x79\xC0\x74\xDF"
+ "\xA6\x09\xBD\x04\xEF\x76\xFA\x4D"
+ "\xD4\x58\x23\x8A\x1D\x81\x68\xF3"
+ "\x5A\xCE\x51\x38\xAC\x37\x9E\x61"
+ "\x95\x7C\xC7\x4B\xD2\xA5\x0C\xB0"
+ "\x1B\xE2\x75\xF9\x40\x2B\x5F\x26"
+ "\x89\x10\x84\x6F\xF6\x59\xCD\x54"
+ "\x3F\xA3\x0A\x9D\x64\xE8\x73\xDA"
+ "\x4E\xD1\xB8\x03\xB7\x1E\xE1\x48"
+ "\xFC\x47\x2E\x52\x25\x8C\x17\x9B"
+ "\x62\xF5\x5C\xC0\xAB\x32\xA6\x09"
+ "\x90\x7B\xEF\x76\xD9\x4D\xD4\xBF"
+ "\x06\x8A\x1D\xE4\x4F\xF3\x5A\x2D"
+ "\x51\x38\x83\x6A\x9E\x61\xC8\x53"
+ "\xC7\xAE\x31\xA5\x0C\x97\x7E\xE2"
+ "\x75\xDC\x40\x2B\xB2\x05\x89\x10"
+ "\xFB\x42\xF6\x59\x20\x54\x3F\x86"
+ "\x69\x9D\x64\xCF\x56\xDA\xAD\x34"
+ "\xB8\x03\xEA\x7D\xE1\x48\xD3\x47",
+ .ilen = 496,
+ .result = "\xF8\xF6\xB5\x60\x5C\x5A\x75\x84"
+ "\x87\x81\x53\xBA\xC9\x6F\xEC\xD5"
+ "\x1E\x68\x8E\x85\x12\x86\x1D\x38"
+ "\x1C\x91\x40\xCC\x69\x6A\xD5\x35"
+ "\x0D\x7C\xB5\x07\x7C\x7B\x2A\xAF"
+ "\x32\xBC\xA1\xB3\x84\x31\x1B\x3C"
+ "\x0A\x2B\xFA\xD3\x9F\xB0\x8C\x37"
+ "\x8F\x9D\xA7\x6D\x6C\xFA\xD7\x90"
+ "\xE3\x69\x54\xED\x3A\xC4\xF1\x6B"
+ "\xB1\xCC\xFB\x7D\xD8\x8E\x17\x0B"
+ "\x9C\xF6\x4C\xD6\xFF\x03\x4E\xD9"
+ "\xE6\xA5\xAD\x25\xE6\x17\x69\x63"
+ "\x11\x35\x61\x94\x88\x7B\x1C\x48"
+ "\xF1\x24\x20\x29\x6B\x93\x1A\x8E"
+ "\x43\x03\x89\xD8\xB1\xDA\x47\x7B"
+ "\x79\x3A\x83\x76\xDA\xAE\xC6\xBB"
+ "\x22\xF8\xE8\x3D\x9A\x65\x54\xD8"
+ "\x4C\xE9\xE7\xE4\x63\x2F\x5C\x73"
+ "\x5A\xC3\xAE\x46\xA8\xCD\x57\xE6"
+ "\x67\x88\xA5\x20\x6F\x5F\x97\xC7"
+ "\xCC\x15\xA2\x0A\x93\xEA\x33\xE7"
+ "\x03\x5F\xEC\x64\x30\x6F\xEE\xD7"
+ "\x7E\xDF\xD6\xE9\x6F\x3F\xD6\x1E"
+ "\xBE\x67\x6C\x5B\x97\xA0\x09\xE6"
+ "\xEE\xFE\x55\xA3\x29\x65\xE0\x12"
+ "\xA1\x6A\x8A\x6F\xF2\xE6\xF1\x96"
+ "\x87\xFB\x9C\x05\xDD\x80\xEC\xFF"
+ "\xC5\xED\x50\xFE\xFC\x91\xCD\xCE"
+ "\x25\x2C\x5F\xD9\xAD\x95\x7D\x99"
+ "\xF0\x05\xC4\x71\x46\x5F\xF9\x0D"
+ "\xD2\x63\xDF\x9B\x96\x2E\x2B\xA6"
+ "\x2B\x1C\xD5\xFB\x96\x24\x60\x60"
+ "\x54\x40\xB8\x62\xA4\xF8\x46\x95"
+ "\x73\x28\xA3\xA6\x16\x2B\x17\xE7"
+ "\x7A\xF8\x62\x54\x3B\x64\x69\xE1"
+ "\x71\x34\x29\x5B\x4E\x05\x9B\xFA"
+ "\x5E\xF1\x96\xB7\xCE\x16\x9B\x59"
+ "\xF1\x1A\x4C\x51\x26\xFD\x79\xE2"
+ "\x3B\x8E\x71\x69\x6A\x91\xB6\x65"
+ "\x32\x09\xB8\xE4\x09\x1F\xEA\x39"
+ "\xCE\x20\x65\x9F\xD6\xD1\xC7\xF0"
+ "\x73\x50\x08\x56\x20\x9B\x94\x23"
+ "\x14\x39\xB7\x2B\xB1\x2D\x6D\x6F"
+ "\x41\x5B\xCC\xE2\x18\xAE\x62\x89"
+ "\x78\x8E\x67\x23\xD0\xFB\x2B\xE5"
+ "\x25\xC9\x48\x97\xB5\xD3\x17\xD5"
+ "\x6A\x9F\xA7\x48\x0C\x2B\x73\x3B"
+ "\x57\x08\xAE\x91\xF2\xB7\x57\x89"
+ "\xF4\xD0\xB0\x07\xB0\x42\x6C\xAF"
+ "\x98\x1A\xE7\xD1\xAC\x1E\xB5\x02"
+ "\xD4\x56\x42\x79\x79\x7F\x2A\x77"
+ "\x25\xE9\x7D\xC1\x88\x19\x2B\x49"
+ "\x6F\x46\x59\xAB\x56\x1F\x61\xE0"
+ "\x0C\x24\x9C\xC9\x5B\x63\xA9\x12"
+ "\xCF\x88\x96\xB6\xA8\x24\xC6\xA8"
+ "\x21\x85\x1A\x62\x7E\x34\xBB\xEB"
+ "\xBD\x02\x2A\xC7\xD8\x89\x80\xC5"
+ "\xB1\xBB\x60\xA5\x22\xFC\x6F\x38"
+ "\x02\x80\xA3\x28\x22\x75\xE1\xE9"
+ "\x90\xE9\xFA\x4B\x00\x10\xAC\x58"
+ "\x83\x70\xFF\x86\xE6\xAA\x0F\x1F"
+ "\x95\x63\x73\xA2\x44\xAC\xF8\xA5",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -2243,16 +4290,710 @@ static struct cipher_testvec des3_ede_cbc_dec_tv_template[] = {
"\x63\x65\x65\x72\x73\x74\x54\x20"
"\x6f\x6f\x4d\x20\x6e\x61\x0a\x79",
.rlen = 128,
+ }, { /* Generated with Crypto++ */
+ .key = "\x9C\xD6\xF3\x9C\xB9\x5A\x67\x00"
+ "\x5A\x67\x00\x2D\xCE\xEB\x2D\xCE"
+ "\xEB\xB4\x51\x72\xB4\x51\x72\x1F",
+ .klen = 24,
+ .iv = "\xB2\xD7\x48\xED\x06\x44\xF9\x12"
+ "\xB7\x28\x4D\x83\x24\x59\xF2\x17",
+ .input = "\xF8\xF6\xB5\x60\x5C\x5A\x75\x84"
+ "\x87\x81\x53\xBA\xC9\x6F\xEC\xD5"
+ "\x1E\x68\x8E\x85\x12\x86\x1D\x38"
+ "\x1C\x91\x40\xCC\x69\x6A\xD5\x35"
+ "\x0D\x7C\xB5\x07\x7C\x7B\x2A\xAF"
+ "\x32\xBC\xA1\xB3\x84\x31\x1B\x3C"
+ "\x0A\x2B\xFA\xD3\x9F\xB0\x8C\x37"
+ "\x8F\x9D\xA7\x6D\x6C\xFA\xD7\x90"
+ "\xE3\x69\x54\xED\x3A\xC4\xF1\x6B"
+ "\xB1\xCC\xFB\x7D\xD8\x8E\x17\x0B"
+ "\x9C\xF6\x4C\xD6\xFF\x03\x4E\xD9"
+ "\xE6\xA5\xAD\x25\xE6\x17\x69\x63"
+ "\x11\x35\x61\x94\x88\x7B\x1C\x48"
+ "\xF1\x24\x20\x29\x6B\x93\x1A\x8E"
+ "\x43\x03\x89\xD8\xB1\xDA\x47\x7B"
+ "\x79\x3A\x83\x76\xDA\xAE\xC6\xBB"
+ "\x22\xF8\xE8\x3D\x9A\x65\x54\xD8"
+ "\x4C\xE9\xE7\xE4\x63\x2F\x5C\x73"
+ "\x5A\xC3\xAE\x46\xA8\xCD\x57\xE6"
+ "\x67\x88\xA5\x20\x6F\x5F\x97\xC7"
+ "\xCC\x15\xA2\x0A\x93\xEA\x33\xE7"
+ "\x03\x5F\xEC\x64\x30\x6F\xEE\xD7"
+ "\x7E\xDF\xD6\xE9\x6F\x3F\xD6\x1E"
+ "\xBE\x67\x6C\x5B\x97\xA0\x09\xE6"
+ "\xEE\xFE\x55\xA3\x29\x65\xE0\x12"
+ "\xA1\x6A\x8A\x6F\xF2\xE6\xF1\x96"
+ "\x87\xFB\x9C\x05\xDD\x80\xEC\xFF"
+ "\xC5\xED\x50\xFE\xFC\x91\xCD\xCE"
+ "\x25\x2C\x5F\xD9\xAD\x95\x7D\x99"
+ "\xF0\x05\xC4\x71\x46\x5F\xF9\x0D"
+ "\xD2\x63\xDF\x9B\x96\x2E\x2B\xA6"
+ "\x2B\x1C\xD5\xFB\x96\x24\x60\x60"
+ "\x54\x40\xB8\x62\xA4\xF8\x46\x95"
+ "\x73\x28\xA3\xA6\x16\x2B\x17\xE7"
+ "\x7A\xF8\x62\x54\x3B\x64\x69\xE1"
+ "\x71\x34\x29\x5B\x4E\x05\x9B\xFA"
+ "\x5E\xF1\x96\xB7\xCE\x16\x9B\x59"
+ "\xF1\x1A\x4C\x51\x26\xFD\x79\xE2"
+ "\x3B\x8E\x71\x69\x6A\x91\xB6\x65"
+ "\x32\x09\xB8\xE4\x09\x1F\xEA\x39"
+ "\xCE\x20\x65\x9F\xD6\xD1\xC7\xF0"
+ "\x73\x50\x08\x56\x20\x9B\x94\x23"
+ "\x14\x39\xB7\x2B\xB1\x2D\x6D\x6F"
+ "\x41\x5B\xCC\xE2\x18\xAE\x62\x89"
+ "\x78\x8E\x67\x23\xD0\xFB\x2B\xE5"
+ "\x25\xC9\x48\x97\xB5\xD3\x17\xD5"
+ "\x6A\x9F\xA7\x48\x0C\x2B\x73\x3B"
+ "\x57\x08\xAE\x91\xF2\xB7\x57\x89"
+ "\xF4\xD0\xB0\x07\xB0\x42\x6C\xAF"
+ "\x98\x1A\xE7\xD1\xAC\x1E\xB5\x02"
+ "\xD4\x56\x42\x79\x79\x7F\x2A\x77"
+ "\x25\xE9\x7D\xC1\x88\x19\x2B\x49"
+ "\x6F\x46\x59\xAB\x56\x1F\x61\xE0"
+ "\x0C\x24\x9C\xC9\x5B\x63\xA9\x12"
+ "\xCF\x88\x96\xB6\xA8\x24\xC6\xA8"
+ "\x21\x85\x1A\x62\x7E\x34\xBB\xEB"
+ "\xBD\x02\x2A\xC7\xD8\x89\x80\xC5"
+ "\xB1\xBB\x60\xA5\x22\xFC\x6F\x38"
+ "\x02\x80\xA3\x28\x22\x75\xE1\xE9"
+ "\x90\xE9\xFA\x4B\x00\x10\xAC\x58"
+ "\x83\x70\xFF\x86\xE6\xAA\x0F\x1F"
+ "\x95\x63\x73\xA2\x44\xAC\xF8\xA5",
+ .ilen = 496,
+ .result = "\x05\xEC\x77\xFB\x42\xD5\x59\x20"
+ "\x8B\x12\x86\x69\xF0\x5B\xCF\x56"
+ "\x39\xAD\x34\x9F\x66\xEA\x7D\xC4"
+ "\x48\xD3\xBA\x0D\xB1\x18\xE3\x4A"
+ "\xFE\x41\x28\x5C\x27\x8E\x11\x85"
+ "\x6C\xF7\x5E\xC2\x55\x3C\xA0\x0B"
+ "\x92\x65\xE9\x70\xDB\x4F\xD6\xB9"
+ "\x00\xB4\x1F\xE6\x49\xFD\x44\x2F"
+ "\x53\x3A\x8D\x14\x98\x63\xCA\x5D"
+ "\xC1\xA8\x33\xA7\x0E\x91\x78\xEC"
+ "\x77\xDE\x42\xD5\xBC\x07\x8B\x12"
+ "\xE5\x4C\xF0\x5B\x22\x56\x39\x80"
+ "\x6B\x9F\x66\xC9\x50\xC4\xAF\x36"
+ "\xBA\x0D\x94\x7F\xE3\x4A\xDD\x41"
+ "\x28\xB3\x1A\x8E\x11\xF8\x43\xF7"
+ "\x5E\x21\x55\x3C\x87\x6E\x92\x65"
+ "\xCC\x57\xDB\xA2\x35\xB9\x00\xEB"
+ "\x72\xE6\x49\xD0\x44\x2F\xB6\x19"
+ "\x8D\x14\xFF\x46\xCA\x5D\x24\xA8"
+ "\x33\x9A\x6D\x91\x78\xC3\x77\xDE"
+ "\xA1\x08\xBC\x07\xEE\x71\xE5\x4C"
+ "\xD7\x5B\x22\xB5\x1C\x80\x6B\xF2"
+ "\x45\xC9\x50\x3B\xAF\x36\x99\x60"
+ "\x94\x7F\xC6\x4A\xDD\xA4\x0F\xB3"
+ "\x1A\xED\x74\xF8\x43\x2A\x5E\x21"
+ "\x88\x13\x87\x6E\xF1\x58\xCC\x57"
+ "\x3E\xA2\x35\x9C\x67\xEB\x72\xC5"
+ "\x49\xD0\xBB\x02\xB6\x19\xE0\x4B"
+ "\xFF\x46\x29\x5D\x24\x8F\x16\x9A"
+ "\x6D\xF4\x5F\xC3\xAA\x3D\xA1\x08"
+ "\x93\x7A\xEE\x71\xD8\x4C\xD7\xBE"
+ "\x01\xB5\x1C\xE7\x4E\xF2\x45\x2C"
+ "\x50\x3B\x82\x15\x99\x60\xCB\x52"
+ "\xC6\xA9\x30\xA4\x0F\x96\x79\xED"
+ "\x74\xDF\x43\x2A\xBD\x04\x88\x13"
+ "\xFA\x4D\xF1\x58\x23\x57\x3E\x81"
+ "\x68\x9C\x67\xCE\x51\xC5\xAC\x37"
+ "\xBB\x02\x95\x7C\xE0\x4B\xD2\x46"
+ "\x29\xB0\x1B\x8F\x16\xF9\x40\xF4"
+ "\x5F\x26\xAA\x3D\x84\x6F\x93\x7A"
+ "\xCD\x54\xD8\xA3\x0A\xBE\x01\xE8"
+ "\x73\xE7\x4E\xD1\x45\x2C\xB7\x1E"
+ "\x82\x15\xFC\x47\xCB\x52\x25\xA9"
+ "\x30\x9B\x62\x96\x79\xC0\x74\xDF"
+ "\xA6\x09\xBD\x04\xEF\x76\xFA\x4D"
+ "\xD4\x58\x23\x8A\x1D\x81\x68\xF3"
+ "\x5A\xCE\x51\x38\xAC\x37\x9E\x61"
+ "\x95\x7C\xC7\x4B\xD2\xA5\x0C\xB0"
+ "\x1B\xE2\x75\xF9\x40\x2B\x5F\x26"
+ "\x89\x10\x84\x6F\xF6\x59\xCD\x54"
+ "\x3F\xA3\x0A\x9D\x64\xE8\x73\xDA"
+ "\x4E\xD1\xB8\x03\xB7\x1E\xE1\x48"
+ "\xFC\x47\x2E\x52\x25\x8C\x17\x9B"
+ "\x62\xF5\x5C\xC0\xAB\x32\xA6\x09"
+ "\x90\x7B\xEF\x76\xD9\x4D\xD4\xBF"
+ "\x06\x8A\x1D\xE4\x4F\xF3\x5A\x2D"
+ "\x51\x38\x83\x6A\x9E\x61\xC8\x53"
+ "\xC7\xAE\x31\xA5\x0C\x97\x7E\xE2"
+ "\x75\xDC\x40\x2B\xB2\x05\x89\x10"
+ "\xFB\x42\xF6\x59\x20\x54\x3F\x86"
+ "\x69\x9D\x64\xCF\x56\xDA\xAD\x34"
+ "\xB8\x03\xEA\x7D\xE1\x48\xD3\x47",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec des3_ede_ctr_enc_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x9C\xD6\xF3\x9C\xB9\x5A\x67\x00"
+ "\x5A\x67\x00\x2D\xCE\xEB\x2D\xCE"
+ "\xEB\xB4\x51\x72\xB4\x51\x72\x1F",
+ .klen = 24,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
+ "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x05\xEC\x77\xFB\x42\xD5\x59\x20"
+ "\x8B\x12\x86\x69\xF0\x5B\xCF\x56"
+ "\x39\xAD\x34\x9F\x66\xEA\x7D\xC4"
+ "\x48\xD3\xBA\x0D\xB1\x18\xE3\x4A"
+ "\xFE\x41\x28\x5C\x27\x8E\x11\x85"
+ "\x6C\xF7\x5E\xC2\x55\x3C\xA0\x0B"
+ "\x92\x65\xE9\x70\xDB\x4F\xD6\xB9"
+ "\x00\xB4\x1F\xE6\x49\xFD\x44\x2F"
+ "\x53\x3A\x8D\x14\x98\x63\xCA\x5D"
+ "\xC1\xA8\x33\xA7\x0E\x91\x78\xEC"
+ "\x77\xDE\x42\xD5\xBC\x07\x8B\x12"
+ "\xE5\x4C\xF0\x5B\x22\x56\x39\x80"
+ "\x6B\x9F\x66\xC9\x50\xC4\xAF\x36"
+ "\xBA\x0D\x94\x7F\xE3\x4A\xDD\x41"
+ "\x28\xB3\x1A\x8E\x11\xF8\x43\xF7"
+ "\x5E\x21\x55\x3C\x87\x6E\x92\x65"
+ "\xCC\x57\xDB\xA2\x35\xB9\x00\xEB"
+ "\x72\xE6\x49\xD0\x44\x2F\xB6\x19"
+ "\x8D\x14\xFF\x46\xCA\x5D\x24\xA8"
+ "\x33\x9A\x6D\x91\x78\xC3\x77\xDE"
+ "\xA1\x08\xBC\x07\xEE\x71\xE5\x4C"
+ "\xD7\x5B\x22\xB5\x1C\x80\x6B\xF2"
+ "\x45\xC9\x50\x3B\xAF\x36\x99\x60"
+ "\x94\x7F\xC6\x4A\xDD\xA4\x0F\xB3"
+ "\x1A\xED\x74\xF8\x43\x2A\x5E\x21"
+ "\x88\x13\x87\x6E\xF1\x58\xCC\x57"
+ "\x3E\xA2\x35\x9C\x67\xEB\x72\xC5"
+ "\x49\xD0\xBB\x02\xB6\x19\xE0\x4B"
+ "\xFF\x46\x29\x5D\x24\x8F\x16\x9A"
+ "\x6D\xF4\x5F\xC3\xAA\x3D\xA1\x08"
+ "\x93\x7A\xEE\x71\xD8\x4C\xD7\xBE"
+ "\x01\xB5\x1C\xE7\x4E\xF2\x45\x2C"
+ "\x50\x3B\x82\x15\x99\x60\xCB\x52"
+ "\xC6\xA9\x30\xA4\x0F\x96\x79\xED"
+ "\x74\xDF\x43\x2A\xBD\x04\x88\x13"
+ "\xFA\x4D\xF1\x58\x23\x57\x3E\x81"
+ "\x68\x9C\x67\xCE\x51\xC5\xAC\x37"
+ "\xBB\x02\x95\x7C\xE0\x4B\xD2\x46"
+ "\x29\xB0\x1B\x8F\x16\xF9\x40\xF4"
+ "\x5F\x26\xAA\x3D\x84\x6F\x93\x7A"
+ "\xCD\x54\xD8\xA3\x0A\xBE\x01\xE8"
+ "\x73\xE7\x4E\xD1\x45\x2C\xB7\x1E"
+ "\x82\x15\xFC\x47\xCB\x52\x25\xA9"
+ "\x30\x9B\x62\x96\x79\xC0\x74\xDF"
+ "\xA6\x09\xBD\x04\xEF\x76\xFA\x4D"
+ "\xD4\x58\x23\x8A\x1D\x81\x68\xF3"
+ "\x5A\xCE\x51\x38\xAC\x37\x9E\x61"
+ "\x95\x7C\xC7\x4B\xD2\xA5\x0C\xB0"
+ "\x1B\xE2\x75\xF9\x40\x2B\x5F\x26"
+ "\x89\x10\x84\x6F\xF6\x59\xCD\x54"
+ "\x3F\xA3\x0A\x9D\x64\xE8\x73\xDA"
+ "\x4E\xD1\xB8\x03\xB7\x1E\xE1\x48"
+ "\xFC\x47\x2E\x52\x25\x8C\x17\x9B"
+ "\x62\xF5\x5C\xC0\xAB\x32\xA6\x09"
+ "\x90\x7B\xEF\x76\xD9\x4D\xD4\xBF"
+ "\x06\x8A\x1D\xE4\x4F\xF3\x5A\x2D"
+ "\x51\x38\x83\x6A\x9E\x61\xC8\x53"
+ "\xC7\xAE\x31\xA5\x0C\x97\x7E\xE2"
+ "\x75\xDC\x40\x2B\xB2\x05\x89\x10"
+ "\xFB\x42\xF6\x59\x20\x54\x3F\x86"
+ "\x69\x9D\x64\xCF\x56\xDA\xAD\x34"
+ "\xB8\x03\xEA\x7D\xE1\x48\xD3\x47",
+ .ilen = 496,
+ .result = "\x07\xC2\x08\x20\x72\x1F\x49\xEF"
+ "\x19\xCD\x6F\x32\x53\x05\x22\x15"
+ "\xA2\x85\x2B\xDB\x85\xD2\xD8\xB9"
+ "\xDD\x0D\x1B\x45\xCB\x69\x11\xD4"
+ "\xEA\xBE\xB2\x45\x5D\x0C\xAE\xBE"
+ "\xA0\xC1\x27\xAC\x65\x9F\x53\x7E"
+ "\xAF\xC2\x1B\xB5\xB8\x6D\x36\x0C"
+ "\x25\xC0\xF8\x6D\x0B\x29\x01\xDA"
+ "\x13\x78\xDC\x89\x12\x12\x43\xFA"
+ "\xF6\x12\xEF\x8D\x87\x62\x78\x83"
+ "\xE2\xBE\x41\x20\x4C\x6D\x35\x1B"
+ "\xD1\x0C\x30\xCF\xE2\xDE\x2B\x03"
+ "\xBF\x45\x73\xD4\xE5\x59\x95\xD1"
+ "\xB3\x9B\x27\x62\x97\xBD\xDE\x7F"
+ "\xA4\xD2\x39\x80\xAA\x50\x23\xF0"
+ "\x74\x88\x3D\xA8\x6A\x18\x79\x3B"
+ "\xC4\x96\x6C\x8D\x22\x40\x92\x6E"
+ "\xD6\xAD\x2A\x1F\xDE\x63\xC0\xE7"
+ "\x07\xF7\x2D\xF7\xB5\xF3\xF0\xCC"
+ "\x01\x7C\x2A\x9B\xC2\x10\xCA\xAA"
+ "\xFD\x2B\x3F\xC5\xF3\xF6\xFC\x9B"
+ "\x45\xDB\x53\xE4\x5B\xF3\xC9\x7B"
+ "\x8E\x52\xFF\xC8\x02\xB8\xAC\x9D"
+ "\xA1\x00\x39\xDA\x3D\x2D\x0E\x01"
+ "\x09\x7D\x8D\x5E\xBE\x53\xB9\xB0"
+ "\x8E\xE7\xE2\x96\x6A\xB2\x78\xEA"
+ "\xDE\x23\x8B\xA5\xFA\x5C\xE3\xDA"
+ "\xBF\x8E\x31\x6A\x55\xD1\x6A\xB2"
+ "\xB5\x46\x6F\xA5\xF0\xEE\xBA\x1F"
+ "\x9F\x98\xB0\x66\x4F\xD0\x3F\xA9"
+ "\xDF\x5F\x58\xC4\xF4\xFF\x75\x5C"
+ "\x40\x3A\x09\x7E\x6E\x1C\x97\xD4"
+ "\xCC\xE7\xE7\x71\xCF\x0B\x15\x08"
+ "\x71\xFA\x07\x97\xCD\xE6\xCA\x1D"
+ "\x14\x28\x0C\xCF\x99\x13\x7A\xF1"
+ "\xEB\xFA\xFA\x92\x07\xDE\x1D\xA1"
+ "\xD3\x36\x69\xFE\x51\x4D\x9F\x2E"
+ "\x83\x37\x4F\x1F\x48\x30\xED\x04"
+ "\x4D\xA4\xEF\x3A\xCA\x76\xF4\x1C"
+ "\x41\x8F\x63\x37\x78\x2F\x86\xA6"
+ "\xEF\x41\x7E\xD2\xAF\x88\xAB\x67"
+ "\x52\x71\xC3\x8E\xF8\x26\x93\x72"
+ "\xAA\xD6\x0E\xE7\x0B\x46\xB1\x3A"
+ "\xB4\x08\xA9\xA8\xA0\xCF\x20\x0C"
+ "\x52\xBC\x8B\x05\x56\xB2\xBC\x31"
+ "\x9B\x74\xB9\x29\x29\x96\x9A\x50"
+ "\xDC\x45\xDC\x1A\xEB\x0C\x64\xD4"
+ "\xD3\x05\x7E\x59\x55\xC3\xF4\x90"
+ "\xC2\xAB\xF8\x9B\x8A\xDA\xCE\xA1"
+ "\xC3\xF4\xAD\x77\xDD\x44\xC8\xAC"
+ "\xA3\xF1\xC9\xD2\x19\x5C\xB0\xCA"
+ "\xA2\x34\xC1\xF7\x6C\xFD\xAC\x65"
+ "\x32\xDC\x48\xC4\xF2\x00\x6B\x77"
+ "\xF1\x7D\x76\xAC\xC0\x31\x63\x2A"
+ "\xA5\x3A\x62\xC8\x91\xB1\x03\x65"
+ "\xCB\x43\xD1\x06\xDF\xC3\x67\xBC"
+ "\xDC\xE0\xCD\x35\xCE\x49\x65\xA0"
+ "\x52\x7B\xA7\x0D\x07\xA9\x1B\xB0"
+ "\x40\x77\x72\xC2\xEA\x0E\x3A\x78"
+ "\x46\xB9\x91\xB6\xE7\x3D\x51\x42"
+ "\xFD\x51\xB0\xC6\x2C\x63\x13\x78"
+ "\x5C\xEE\xFC\xCF\xC4\x70\x00\x34",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ }, { /* Generated with Crypto++ */
+ .key = "\x9C\xD6\xF3\x9C\xB9\x5A\x67\x00"
+ "\x5A\x67\x00\x2D\xCE\xEB\x2D\xCE"
+ "\xEB\xB4\x51\x72\xB4\x51\x72\x1F",
+ .klen = 24,
+ .iv = "\xB2\xD7\x48\xED\x06\x44\xF9\x12"
+ "\xB7\x28\x4D\x83\x24\x59\xF2\x17",
+ .input = "\x05\xEC\x77\xFB\x42\xD5\x59\x20"
+ "\x8B\x12\x86\x69\xF0\x5B\xCF\x56"
+ "\x39\xAD\x34\x9F\x66\xEA\x7D\xC4"
+ "\x48\xD3\xBA\x0D\xB1\x18\xE3\x4A"
+ "\xFE\x41\x28\x5C\x27\x8E\x11\x85"
+ "\x6C\xF7\x5E\xC2\x55\x3C\xA0\x0B"
+ "\x92\x65\xE9\x70\xDB\x4F\xD6\xB9"
+ "\x00\xB4\x1F\xE6\x49\xFD\x44\x2F"
+ "\x53\x3A\x8D\x14\x98\x63\xCA\x5D"
+ "\xC1\xA8\x33\xA7\x0E\x91\x78\xEC"
+ "\x77\xDE\x42\xD5\xBC\x07\x8B\x12"
+ "\xE5\x4C\xF0\x5B\x22\x56\x39\x80"
+ "\x6B\x9F\x66\xC9\x50\xC4\xAF\x36"
+ "\xBA\x0D\x94\x7F\xE3\x4A\xDD\x41"
+ "\x28\xB3\x1A\x8E\x11\xF8\x43\xF7"
+ "\x5E\x21\x55\x3C\x87\x6E\x92\x65"
+ "\xCC\x57\xDB\xA2\x35\xB9\x00\xEB"
+ "\x72\xE6\x49\xD0\x44\x2F\xB6\x19"
+ "\x8D\x14\xFF\x46\xCA\x5D\x24\xA8"
+ "\x33\x9A\x6D\x91\x78\xC3\x77\xDE"
+ "\xA1\x08\xBC\x07\xEE\x71\xE5\x4C"
+ "\xD7\x5B\x22\xB5\x1C\x80\x6B\xF2"
+ "\x45\xC9\x50\x3B\xAF\x36\x99\x60"
+ "\x94\x7F\xC6\x4A\xDD\xA4\x0F\xB3"
+ "\x1A\xED\x74\xF8\x43\x2A\x5E\x21"
+ "\x88\x13\x87\x6E\xF1\x58\xCC\x57"
+ "\x3E\xA2\x35\x9C\x67\xEB\x72\xC5"
+ "\x49\xD0\xBB\x02\xB6\x19\xE0\x4B"
+ "\xFF\x46\x29\x5D\x24\x8F\x16\x9A"
+ "\x6D\xF4\x5F\xC3\xAA\x3D\xA1\x08"
+ "\x93\x7A\xEE\x71\xD8\x4C\xD7\xBE"
+ "\x01\xB5\x1C\xE7\x4E\xF2\x45\x2C"
+ "\x50\x3B\x82\x15\x99\x60\xCB\x52"
+ "\xC6\xA9\x30\xA4\x0F\x96\x79\xED"
+ "\x74\xDF\x43\x2A\xBD\x04\x88\x13"
+ "\xFA\x4D\xF1\x58\x23\x57\x3E\x81"
+ "\x68\x9C\x67\xCE\x51\xC5\xAC\x37"
+ "\xBB\x02\x95\x7C\xE0\x4B\xD2\x46"
+ "\x29\xB0\x1B\x8F\x16\xF9\x40\xF4"
+ "\x5F\x26\xAA\x3D\x84\x6F\x93\x7A"
+ "\xCD\x54\xD8\xA3\x0A\xBE\x01\xE8"
+ "\x73\xE7\x4E\xD1\x45\x2C\xB7\x1E"
+ "\x82\x15\xFC\x47\xCB\x52\x25\xA9"
+ "\x30\x9B\x62\x96\x79\xC0\x74\xDF"
+ "\xA6\x09\xBD\x04\xEF\x76\xFA\x4D"
+ "\xD4\x58\x23\x8A\x1D\x81\x68\xF3"
+ "\x5A\xCE\x51\x38\xAC\x37\x9E\x61"
+ "\x95\x7C\xC7\x4B\xD2\xA5\x0C\xB0"
+ "\x1B\xE2\x75\xF9\x40\x2B\x5F\x26"
+ "\x89\x10\x84\x6F\xF6\x59\xCD\x54"
+ "\x3F\xA3\x0A\x9D\x64\xE8\x73\xDA"
+ "\x4E\xD1\xB8\x03\xB7\x1E\xE1\x48"
+ "\xFC\x47\x2E\x52\x25\x8C\x17\x9B"
+ "\x62\xF5\x5C\xC0\xAB\x32\xA6\x09"
+ "\x90\x7B\xEF\x76\xD9\x4D\xD4\xBF"
+ "\x06\x8A\x1D\xE4\x4F\xF3\x5A\x2D"
+ "\x51\x38\x83\x6A\x9E\x61\xC8\x53"
+ "\xC7\xAE\x31\xA5\x0C\x97\x7E\xE2"
+ "\x75\xDC\x40\x2B\xB2\x05\x89\x10"
+ "\xFB\x42\xF6\x59\x20\x54\x3F\x86"
+ "\x69\x9D\x64\xCF\x56\xDA\xAD\x34"
+ "\xB8\x03\xEA\x7D\xE1\x48\xD3\x47"
+ "\x2E\xB1\x18",
+ .ilen = 499,
+ .result = "\x23\xFF\x5C\x99\x75\xBB\x1F\xD4"
+ "\xBC\x27\x9D\x36\x60\xA9\xC9\xF7"
+ "\x94\x9D\x1B\xFF\x8E\x95\x57\x89"
+ "\x8C\x2E\x33\x70\x43\x61\xE6\xD2"
+ "\x82\x33\x63\xB6\xC4\x34\x5E\xF8"
+ "\x96\x07\xA7\xD2\x3B\x8E\xC9\xAA"
+ "\x7C\xA0\x55\x89\x2E\xE1\x85\x25"
+ "\x14\x04\xDA\x6B\xE0\xEE\x56\xCF"
+ "\x08\x2E\x69\xD4\x54\xDE\x22\x84"
+ "\x69\xA6\xA7\xD3\x3A\x9A\xE8\x05"
+ "\x63\xDB\xBF\x46\x3A\x26\x2E\x0F"
+ "\x58\x5C\x46\xEA\x07\x40\xDA\xE1"
+ "\x14\x1D\xCD\x4F\x06\xC0\xCA\x54"
+ "\x1E\xC9\x45\x85\x67\x7C\xC2\xB5"
+ "\x97\x5D\x61\x78\x2E\x46\xEC\x6A"
+ "\x53\xF4\xD0\xAE\xFA\xB4\x86\x29"
+ "\x9F\x17\x33\x24\xD8\xB9\xB2\x05"
+ "\x93\x88\xEA\xF7\xA0\x70\x69\x49"
+ "\x88\x6B\x73\x40\x41\x8D\xD9\xD9"
+ "\x7E\x78\xE9\xBE\x6C\x14\x22\x7A"
+ "\x66\xE1\xDA\xED\x10\xFF\x69\x1D"
+ "\xB9\xAA\xF2\x56\x72\x1B\x23\xE2"
+ "\x45\x54\x8B\xA3\x70\x23\xB4\x5E"
+ "\x8E\x96\xC9\x05\x00\xB3\xB6\xC2"
+ "\x2A\x02\x43\x7A\x62\xD5\xC8\xD2"
+ "\xC2\xD0\xE4\x78\xA1\x7B\x3E\xE8"
+ "\x9F\x7F\x7D\x40\x54\x30\x3B\xC0"
+ "\xA5\x54\xFD\xCA\x25\xEC\x44\x3E"
+ "\x1A\x54\x7F\x88\xD0\xE1\xFE\x71"
+ "\xCE\x05\x49\x89\xBA\xD6\x72\xE7"
+ "\xD6\x5D\x3F\xA2\xD9\xAB\xC5\x02"
+ "\xD6\x43\x22\xAF\xA2\xE4\x80\x85"
+ "\xD7\x87\xB9\xEA\x43\xDB\xC8\xEF"
+ "\x5C\x82\x2E\x98\x0D\x30\x41\x6B"
+ "\x08\x48\x8D\xF0\xF8\x60\xD7\x9D"
+ "\xE9\xDE\x40\xAD\x0D\xAD\x0D\x58"
+ "\x2A\x98\x35\xFE\xF7\xDD\x4B\x40"
+ "\xDE\xB0\x05\xD9\x7B\x09\x4D\xBC"
+ "\x42\xC0\xF1\x15\x0B\xFA\x26\x6B"
+ "\xC6\x12\x13\x4F\xCB\x35\xBA\x35"
+ "\xDD\x7A\x36\x9C\x12\x57\x55\x83"
+ "\x78\x58\x09\xD0\xB0\xCF\x7C\x5C"
+ "\x38\xCF\xBD\x79\x5B\x13\x4D\x97"
+ "\xC1\x85\x6F\x97\xC9\xE8\xC2\xA4"
+ "\x98\xE2\xBD\x77\x6B\x53\x39\x1A"
+ "\x28\x10\xE7\xE0\xE7\xDE\x9D\x69"
+ "\x78\x6F\x8E\xD2\xD9\x5D\xD2\x15"
+ "\x9E\xB5\x4D\x8C\xC0\x78\x22\x2F"
+ "\x17\x11\x2E\x99\xD7\xE3\xA4\x4F"
+ "\x65\xA5\x6B\x03\x2C\x35\x6F\xDA"
+ "\x8A\x19\x08\xE1\x08\x48\x59\x51"
+ "\x53\x4B\xD1\xDF\xDA\x14\x50\x5F"
+ "\xDF\xB5\x8C\xDF\xC6\xFD\x85\xFA"
+ "\xD4\xF9\x64\x45\x65\x0D\x7D\xF4"
+ "\xC8\xCD\x3F\x32\xAF\xDD\x30\xED"
+ "\x7B\xAA\xAC\xF0\xDA\x7F\xDF\x75"
+ "\x1C\xA4\xF1\xCB\x5E\x4F\x0B\xB4"
+ "\x97\x73\x28\xDE\xCF\xAF\x82\xBD"
+ "\xC4\xBA\xB4\x9C\x0D\x16\x77\x42"
+ "\x42\x39\x7C\x53\xA4\xD4\xDD\x40"
+ "\x5C\x60\x1F\x6E\xA7\xE2\xDC\xE7"
+ "\x32\x0F\x05\x2F\xF2\x4C\x95\x3B"
+ "\xF2\x79\xD9",
+ .rlen = 499,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 499 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec des3_ede_ctr_dec_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x9C\xD6\xF3\x9C\xB9\x5A\x67\x00"
+ "\x5A\x67\x00\x2D\xCE\xEB\x2D\xCE"
+ "\xEB\xB4\x51\x72\xB4\x51\x72\x1F",
+ .klen = 24,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
+ "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x07\xC2\x08\x20\x72\x1F\x49\xEF"
+ "\x19\xCD\x6F\x32\x53\x05\x22\x15"
+ "\xA2\x85\x2B\xDB\x85\xD2\xD8\xB9"
+ "\xDD\x0D\x1B\x45\xCB\x69\x11\xD4"
+ "\xEA\xBE\xB2\x45\x5D\x0C\xAE\xBE"
+ "\xA0\xC1\x27\xAC\x65\x9F\x53\x7E"
+ "\xAF\xC2\x1B\xB5\xB8\x6D\x36\x0C"
+ "\x25\xC0\xF8\x6D\x0B\x29\x01\xDA"
+ "\x13\x78\xDC\x89\x12\x12\x43\xFA"
+ "\xF6\x12\xEF\x8D\x87\x62\x78\x83"
+ "\xE2\xBE\x41\x20\x4C\x6D\x35\x1B"
+ "\xD1\x0C\x30\xCF\xE2\xDE\x2B\x03"
+ "\xBF\x45\x73\xD4\xE5\x59\x95\xD1"
+ "\xB3\x9B\x27\x62\x97\xBD\xDE\x7F"
+ "\xA4\xD2\x39\x80\xAA\x50\x23\xF0"
+ "\x74\x88\x3D\xA8\x6A\x18\x79\x3B"
+ "\xC4\x96\x6C\x8D\x22\x40\x92\x6E"
+ "\xD6\xAD\x2A\x1F\xDE\x63\xC0\xE7"
+ "\x07\xF7\x2D\xF7\xB5\xF3\xF0\xCC"
+ "\x01\x7C\x2A\x9B\xC2\x10\xCA\xAA"
+ "\xFD\x2B\x3F\xC5\xF3\xF6\xFC\x9B"
+ "\x45\xDB\x53\xE4\x5B\xF3\xC9\x7B"
+ "\x8E\x52\xFF\xC8\x02\xB8\xAC\x9D"
+ "\xA1\x00\x39\xDA\x3D\x2D\x0E\x01"
+ "\x09\x7D\x8D\x5E\xBE\x53\xB9\xB0"
+ "\x8E\xE7\xE2\x96\x6A\xB2\x78\xEA"
+ "\xDE\x23\x8B\xA5\xFA\x5C\xE3\xDA"
+ "\xBF\x8E\x31\x6A\x55\xD1\x6A\xB2"
+ "\xB5\x46\x6F\xA5\xF0\xEE\xBA\x1F"
+ "\x9F\x98\xB0\x66\x4F\xD0\x3F\xA9"
+ "\xDF\x5F\x58\xC4\xF4\xFF\x75\x5C"
+ "\x40\x3A\x09\x7E\x6E\x1C\x97\xD4"
+ "\xCC\xE7\xE7\x71\xCF\x0B\x15\x08"
+ "\x71\xFA\x07\x97\xCD\xE6\xCA\x1D"
+ "\x14\x28\x0C\xCF\x99\x13\x7A\xF1"
+ "\xEB\xFA\xFA\x92\x07\xDE\x1D\xA1"
+ "\xD3\x36\x69\xFE\x51\x4D\x9F\x2E"
+ "\x83\x37\x4F\x1F\x48\x30\xED\x04"
+ "\x4D\xA4\xEF\x3A\xCA\x76\xF4\x1C"
+ "\x41\x8F\x63\x37\x78\x2F\x86\xA6"
+ "\xEF\x41\x7E\xD2\xAF\x88\xAB\x67"
+ "\x52\x71\xC3\x8E\xF8\x26\x93\x72"
+ "\xAA\xD6\x0E\xE7\x0B\x46\xB1\x3A"
+ "\xB4\x08\xA9\xA8\xA0\xCF\x20\x0C"
+ "\x52\xBC\x8B\x05\x56\xB2\xBC\x31"
+ "\x9B\x74\xB9\x29\x29\x96\x9A\x50"
+ "\xDC\x45\xDC\x1A\xEB\x0C\x64\xD4"
+ "\xD3\x05\x7E\x59\x55\xC3\xF4\x90"
+ "\xC2\xAB\xF8\x9B\x8A\xDA\xCE\xA1"
+ "\xC3\xF4\xAD\x77\xDD\x44\xC8\xAC"
+ "\xA3\xF1\xC9\xD2\x19\x5C\xB0\xCA"
+ "\xA2\x34\xC1\xF7\x6C\xFD\xAC\x65"
+ "\x32\xDC\x48\xC4\xF2\x00\x6B\x77"
+ "\xF1\x7D\x76\xAC\xC0\x31\x63\x2A"
+ "\xA5\x3A\x62\xC8\x91\xB1\x03\x65"
+ "\xCB\x43\xD1\x06\xDF\xC3\x67\xBC"
+ "\xDC\xE0\xCD\x35\xCE\x49\x65\xA0"
+ "\x52\x7B\xA7\x0D\x07\xA9\x1B\xB0"
+ "\x40\x77\x72\xC2\xEA\x0E\x3A\x78"
+ "\x46\xB9\x91\xB6\xE7\x3D\x51\x42"
+ "\xFD\x51\xB0\xC6\x2C\x63\x13\x78"
+ "\x5C\xEE\xFC\xCF\xC4\x70\x00\x34",
+ .ilen = 496,
+ .result = "\x05\xEC\x77\xFB\x42\xD5\x59\x20"
+ "\x8B\x12\x86\x69\xF0\x5B\xCF\x56"
+ "\x39\xAD\x34\x9F\x66\xEA\x7D\xC4"
+ "\x48\xD3\xBA\x0D\xB1\x18\xE3\x4A"
+ "\xFE\x41\x28\x5C\x27\x8E\x11\x85"
+ "\x6C\xF7\x5E\xC2\x55\x3C\xA0\x0B"
+ "\x92\x65\xE9\x70\xDB\x4F\xD6\xB9"
+ "\x00\xB4\x1F\xE6\x49\xFD\x44\x2F"
+ "\x53\x3A\x8D\x14\x98\x63\xCA\x5D"
+ "\xC1\xA8\x33\xA7\x0E\x91\x78\xEC"
+ "\x77\xDE\x42\xD5\xBC\x07\x8B\x12"
+ "\xE5\x4C\xF0\x5B\x22\x56\x39\x80"
+ "\x6B\x9F\x66\xC9\x50\xC4\xAF\x36"
+ "\xBA\x0D\x94\x7F\xE3\x4A\xDD\x41"
+ "\x28\xB3\x1A\x8E\x11\xF8\x43\xF7"
+ "\x5E\x21\x55\x3C\x87\x6E\x92\x65"
+ "\xCC\x57\xDB\xA2\x35\xB9\x00\xEB"
+ "\x72\xE6\x49\xD0\x44\x2F\xB6\x19"
+ "\x8D\x14\xFF\x46\xCA\x5D\x24\xA8"
+ "\x33\x9A\x6D\x91\x78\xC3\x77\xDE"
+ "\xA1\x08\xBC\x07\xEE\x71\xE5\x4C"
+ "\xD7\x5B\x22\xB5\x1C\x80\x6B\xF2"
+ "\x45\xC9\x50\x3B\xAF\x36\x99\x60"
+ "\x94\x7F\xC6\x4A\xDD\xA4\x0F\xB3"
+ "\x1A\xED\x74\xF8\x43\x2A\x5E\x21"
+ "\x88\x13\x87\x6E\xF1\x58\xCC\x57"
+ "\x3E\xA2\x35\x9C\x67\xEB\x72\xC5"
+ "\x49\xD0\xBB\x02\xB6\x19\xE0\x4B"
+ "\xFF\x46\x29\x5D\x24\x8F\x16\x9A"
+ "\x6D\xF4\x5F\xC3\xAA\x3D\xA1\x08"
+ "\x93\x7A\xEE\x71\xD8\x4C\xD7\xBE"
+ "\x01\xB5\x1C\xE7\x4E\xF2\x45\x2C"
+ "\x50\x3B\x82\x15\x99\x60\xCB\x52"
+ "\xC6\xA9\x30\xA4\x0F\x96\x79\xED"
+ "\x74\xDF\x43\x2A\xBD\x04\x88\x13"
+ "\xFA\x4D\xF1\x58\x23\x57\x3E\x81"
+ "\x68\x9C\x67\xCE\x51\xC5\xAC\x37"
+ "\xBB\x02\x95\x7C\xE0\x4B\xD2\x46"
+ "\x29\xB0\x1B\x8F\x16\xF9\x40\xF4"
+ "\x5F\x26\xAA\x3D\x84\x6F\x93\x7A"
+ "\xCD\x54\xD8\xA3\x0A\xBE\x01\xE8"
+ "\x73\xE7\x4E\xD1\x45\x2C\xB7\x1E"
+ "\x82\x15\xFC\x47\xCB\x52\x25\xA9"
+ "\x30\x9B\x62\x96\x79\xC0\x74\xDF"
+ "\xA6\x09\xBD\x04\xEF\x76\xFA\x4D"
+ "\xD4\x58\x23\x8A\x1D\x81\x68\xF3"
+ "\x5A\xCE\x51\x38\xAC\x37\x9E\x61"
+ "\x95\x7C\xC7\x4B\xD2\xA5\x0C\xB0"
+ "\x1B\xE2\x75\xF9\x40\x2B\x5F\x26"
+ "\x89\x10\x84\x6F\xF6\x59\xCD\x54"
+ "\x3F\xA3\x0A\x9D\x64\xE8\x73\xDA"
+ "\x4E\xD1\xB8\x03\xB7\x1E\xE1\x48"
+ "\xFC\x47\x2E\x52\x25\x8C\x17\x9B"
+ "\x62\xF5\x5C\xC0\xAB\x32\xA6\x09"
+ "\x90\x7B\xEF\x76\xD9\x4D\xD4\xBF"
+ "\x06\x8A\x1D\xE4\x4F\xF3\x5A\x2D"
+ "\x51\x38\x83\x6A\x9E\x61\xC8\x53"
+ "\xC7\xAE\x31\xA5\x0C\x97\x7E\xE2"
+ "\x75\xDC\x40\x2B\xB2\x05\x89\x10"
+ "\xFB\x42\xF6\x59\x20\x54\x3F\x86"
+ "\x69\x9D\x64\xCF\x56\xDA\xAD\x34"
+ "\xB8\x03\xEA\x7D\xE1\x48\xD3\x47",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ }, { /* Generated with Crypto++ */
+ .key = "\x9C\xD6\xF3\x9C\xB9\x5A\x67\x00"
+ "\x5A\x67\x00\x2D\xCE\xEB\x2D\xCE"
+ "\xEB\xB4\x51\x72\xB4\x51\x72\x1F",
+ .klen = 24,
+ .iv = "\xB2\xD7\x48\xED\x06\x44\xF9\x12"
+ "\xB7\x28\x4D\x83\x24\x59\xF2\x17",
+ .input = "\x23\xFF\x5C\x99\x75\xBB\x1F\xD4"
+ "\xBC\x27\x9D\x36\x60\xA9\xC9\xF7"
+ "\x94\x9D\x1B\xFF\x8E\x95\x57\x89"
+ "\x8C\x2E\x33\x70\x43\x61\xE6\xD2"
+ "\x82\x33\x63\xB6\xC4\x34\x5E\xF8"
+ "\x96\x07\xA7\xD2\x3B\x8E\xC9\xAA"
+ "\x7C\xA0\x55\x89\x2E\xE1\x85\x25"
+ "\x14\x04\xDA\x6B\xE0\xEE\x56\xCF"
+ "\x08\x2E\x69\xD4\x54\xDE\x22\x84"
+ "\x69\xA6\xA7\xD3\x3A\x9A\xE8\x05"
+ "\x63\xDB\xBF\x46\x3A\x26\x2E\x0F"
+ "\x58\x5C\x46\xEA\x07\x40\xDA\xE1"
+ "\x14\x1D\xCD\x4F\x06\xC0\xCA\x54"
+ "\x1E\xC9\x45\x85\x67\x7C\xC2\xB5"
+ "\x97\x5D\x61\x78\x2E\x46\xEC\x6A"
+ "\x53\xF4\xD0\xAE\xFA\xB4\x86\x29"
+ "\x9F\x17\x33\x24\xD8\xB9\xB2\x05"
+ "\x93\x88\xEA\xF7\xA0\x70\x69\x49"
+ "\x88\x6B\x73\x40\x41\x8D\xD9\xD9"
+ "\x7E\x78\xE9\xBE\x6C\x14\x22\x7A"
+ "\x66\xE1\xDA\xED\x10\xFF\x69\x1D"
+ "\xB9\xAA\xF2\x56\x72\x1B\x23\xE2"
+ "\x45\x54\x8B\xA3\x70\x23\xB4\x5E"
+ "\x8E\x96\xC9\x05\x00\xB3\xB6\xC2"
+ "\x2A\x02\x43\x7A\x62\xD5\xC8\xD2"
+ "\xC2\xD0\xE4\x78\xA1\x7B\x3E\xE8"
+ "\x9F\x7F\x7D\x40\x54\x30\x3B\xC0"
+ "\xA5\x54\xFD\xCA\x25\xEC\x44\x3E"
+ "\x1A\x54\x7F\x88\xD0\xE1\xFE\x71"
+ "\xCE\x05\x49\x89\xBA\xD6\x72\xE7"
+ "\xD6\x5D\x3F\xA2\xD9\xAB\xC5\x02"
+ "\xD6\x43\x22\xAF\xA2\xE4\x80\x85"
+ "\xD7\x87\xB9\xEA\x43\xDB\xC8\xEF"
+ "\x5C\x82\x2E\x98\x0D\x30\x41\x6B"
+ "\x08\x48\x8D\xF0\xF8\x60\xD7\x9D"
+ "\xE9\xDE\x40\xAD\x0D\xAD\x0D\x58"
+ "\x2A\x98\x35\xFE\xF7\xDD\x4B\x40"
+ "\xDE\xB0\x05\xD9\x7B\x09\x4D\xBC"
+ "\x42\xC0\xF1\x15\x0B\xFA\x26\x6B"
+ "\xC6\x12\x13\x4F\xCB\x35\xBA\x35"
+ "\xDD\x7A\x36\x9C\x12\x57\x55\x83"
+ "\x78\x58\x09\xD0\xB0\xCF\x7C\x5C"
+ "\x38\xCF\xBD\x79\x5B\x13\x4D\x97"
+ "\xC1\x85\x6F\x97\xC9\xE8\xC2\xA4"
+ "\x98\xE2\xBD\x77\x6B\x53\x39\x1A"
+ "\x28\x10\xE7\xE0\xE7\xDE\x9D\x69"
+ "\x78\x6F\x8E\xD2\xD9\x5D\xD2\x15"
+ "\x9E\xB5\x4D\x8C\xC0\x78\x22\x2F"
+ "\x17\x11\x2E\x99\xD7\xE3\xA4\x4F"
+ "\x65\xA5\x6B\x03\x2C\x35\x6F\xDA"
+ "\x8A\x19\x08\xE1\x08\x48\x59\x51"
+ "\x53\x4B\xD1\xDF\xDA\x14\x50\x5F"
+ "\xDF\xB5\x8C\xDF\xC6\xFD\x85\xFA"
+ "\xD4\xF9\x64\x45\x65\x0D\x7D\xF4"
+ "\xC8\xCD\x3F\x32\xAF\xDD\x30\xED"
+ "\x7B\xAA\xAC\xF0\xDA\x7F\xDF\x75"
+ "\x1C\xA4\xF1\xCB\x5E\x4F\x0B\xB4"
+ "\x97\x73\x28\xDE\xCF\xAF\x82\xBD"
+ "\xC4\xBA\xB4\x9C\x0D\x16\x77\x42"
+ "\x42\x39\x7C\x53\xA4\xD4\xDD\x40"
+ "\x5C\x60\x1F\x6E\xA7\xE2\xDC\xE7"
+ "\x32\x0F\x05\x2F\xF2\x4C\x95\x3B"
+ "\xF2\x79\xD9",
+ .ilen = 499,
+ .result = "\x05\xEC\x77\xFB\x42\xD5\x59\x20"
+ "\x8B\x12\x86\x69\xF0\x5B\xCF\x56"
+ "\x39\xAD\x34\x9F\x66\xEA\x7D\xC4"
+ "\x48\xD3\xBA\x0D\xB1\x18\xE3\x4A"
+ "\xFE\x41\x28\x5C\x27\x8E\x11\x85"
+ "\x6C\xF7\x5E\xC2\x55\x3C\xA0\x0B"
+ "\x92\x65\xE9\x70\xDB\x4F\xD6\xB9"
+ "\x00\xB4\x1F\xE6\x49\xFD\x44\x2F"
+ "\x53\x3A\x8D\x14\x98\x63\xCA\x5D"
+ "\xC1\xA8\x33\xA7\x0E\x91\x78\xEC"
+ "\x77\xDE\x42\xD5\xBC\x07\x8B\x12"
+ "\xE5\x4C\xF0\x5B\x22\x56\x39\x80"
+ "\x6B\x9F\x66\xC9\x50\xC4\xAF\x36"
+ "\xBA\x0D\x94\x7F\xE3\x4A\xDD\x41"
+ "\x28\xB3\x1A\x8E\x11\xF8\x43\xF7"
+ "\x5E\x21\x55\x3C\x87\x6E\x92\x65"
+ "\xCC\x57\xDB\xA2\x35\xB9\x00\xEB"
+ "\x72\xE6\x49\xD0\x44\x2F\xB6\x19"
+ "\x8D\x14\xFF\x46\xCA\x5D\x24\xA8"
+ "\x33\x9A\x6D\x91\x78\xC3\x77\xDE"
+ "\xA1\x08\xBC\x07\xEE\x71\xE5\x4C"
+ "\xD7\x5B\x22\xB5\x1C\x80\x6B\xF2"
+ "\x45\xC9\x50\x3B\xAF\x36\x99\x60"
+ "\x94\x7F\xC6\x4A\xDD\xA4\x0F\xB3"
+ "\x1A\xED\x74\xF8\x43\x2A\x5E\x21"
+ "\x88\x13\x87\x6E\xF1\x58\xCC\x57"
+ "\x3E\xA2\x35\x9C\x67\xEB\x72\xC5"
+ "\x49\xD0\xBB\x02\xB6\x19\xE0\x4B"
+ "\xFF\x46\x29\x5D\x24\x8F\x16\x9A"
+ "\x6D\xF4\x5F\xC3\xAA\x3D\xA1\x08"
+ "\x93\x7A\xEE\x71\xD8\x4C\xD7\xBE"
+ "\x01\xB5\x1C\xE7\x4E\xF2\x45\x2C"
+ "\x50\x3B\x82\x15\x99\x60\xCB\x52"
+ "\xC6\xA9\x30\xA4\x0F\x96\x79\xED"
+ "\x74\xDF\x43\x2A\xBD\x04\x88\x13"
+ "\xFA\x4D\xF1\x58\x23\x57\x3E\x81"
+ "\x68\x9C\x67\xCE\x51\xC5\xAC\x37"
+ "\xBB\x02\x95\x7C\xE0\x4B\xD2\x46"
+ "\x29\xB0\x1B\x8F\x16\xF9\x40\xF4"
+ "\x5F\x26\xAA\x3D\x84\x6F\x93\x7A"
+ "\xCD\x54\xD8\xA3\x0A\xBE\x01\xE8"
+ "\x73\xE7\x4E\xD1\x45\x2C\xB7\x1E"
+ "\x82\x15\xFC\x47\xCB\x52\x25\xA9"
+ "\x30\x9B\x62\x96\x79\xC0\x74\xDF"
+ "\xA6\x09\xBD\x04\xEF\x76\xFA\x4D"
+ "\xD4\x58\x23\x8A\x1D\x81\x68\xF3"
+ "\x5A\xCE\x51\x38\xAC\x37\x9E\x61"
+ "\x95\x7C\xC7\x4B\xD2\xA5\x0C\xB0"
+ "\x1B\xE2\x75\xF9\x40\x2B\x5F\x26"
+ "\x89\x10\x84\x6F\xF6\x59\xCD\x54"
+ "\x3F\xA3\x0A\x9D\x64\xE8\x73\xDA"
+ "\x4E\xD1\xB8\x03\xB7\x1E\xE1\x48"
+ "\xFC\x47\x2E\x52\x25\x8C\x17\x9B"
+ "\x62\xF5\x5C\xC0\xAB\x32\xA6\x09"
+ "\x90\x7B\xEF\x76\xD9\x4D\xD4\xBF"
+ "\x06\x8A\x1D\xE4\x4F\xF3\x5A\x2D"
+ "\x51\x38\x83\x6A\x9E\x61\xC8\x53"
+ "\xC7\xAE\x31\xA5\x0C\x97\x7E\xE2"
+ "\x75\xDC\x40\x2B\xB2\x05\x89\x10"
+ "\xFB\x42\xF6\x59\x20\x54\x3F\x86"
+ "\x69\x9D\x64\xCF\x56\xDA\xAD\x34"
+ "\xB8\x03\xEA\x7D\xE1\x48\xD3\x47"
+ "\x2E\xB1\x18",
+ .rlen = 499,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 499 - 16, 16 },
},
};
/*
* Blowfish test vectors.
*/
-#define BF_ENC_TEST_VECTORS 6
-#define BF_DEC_TEST_VECTORS 6
-#define BF_CBC_ENC_TEST_VECTORS 1
-#define BF_CBC_DEC_TEST_VECTORS 1
+#define BF_ENC_TEST_VECTORS 7
+#define BF_DEC_TEST_VECTORS 7
+#define BF_CBC_ENC_TEST_VECTORS 2
+#define BF_CBC_DEC_TEST_VECTORS 2
+#define BF_CTR_ENC_TEST_VECTORS 2
+#define BF_CTR_DEC_TEST_VECTORS 2
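The bumped *_TEST_VECTORS counts have to stay in step with the number of entries in the matching template arrays below, so every vector added above is accompanied by a +1 here. As a sketch only (not what this patch does), the count could instead be derived from the array once it is defined, so the two cannot drift apart:

#include <linux/kernel.h>	/* ARRAY_SIZE() */

/* Hypothetical alternative: tie the count to the array definition itself. */
#define BF_CTR_ENC_TEST_VECTORS	ARRAY_SIZE(bf_ctr_enc_tv_template)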
static struct cipher_testvec bf_enc_tv_template[] = {
{ /* DES test vectors from OpenSSL */
@@ -2306,6 +5047,143 @@ static struct cipher_testvec bf_enc_tv_template[] = {
.ilen = 8,
.result = "\xc0\x45\x04\x01\x2e\x4e\x1f\x53",
.rlen = 8,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06",
+ .ilen = 504,
+ .result = "\x96\x87\x3D\x0C\x7B\xFB\xBD\x1F"
+ "\xE3\xC1\x99\x6D\x39\xD4\xC2\x7D"
+ "\xD7\x87\xA1\xF2\xDF\x51\x71\x26"
+ "\xC2\xF4\x6D\xFF\xF6\xCD\x6B\x40"
+ "\xE1\xB3\xBF\xD4\x38\x2B\xC8\x3B"
+ "\xD3\xB2\xD4\x61\xC7\x9F\x06\xE9"
+ "\xCD\xF3\x88\x39\x39\x7A\xDF\x19"
+ "\xE8\x03\x2A\x0B\x9E\xA0\x2B\x86"
+ "\x31\xF8\x9D\xB1\xEE\x78\x9D\xB5"
+ "\xCD\x8B\x7C\x2E\xF5\xA2\x2D\x5D"
+ "\x6E\x66\xAF\x38\x6C\xD3\x13\xED"
+ "\x14\xEA\x5D\xD0\x17\x77\x0F\x4A"
+ "\x50\xF2\xD0\x0F\xC8\xF7\x1E\x7B"
+ "\x9D\x5B\x54\x65\x4F\x16\x8A\x97"
+ "\xF3\xF6\xD4\xAA\x87\x36\x77\x72"
+ "\x99\x4A\xB5\x5E\x88\xC3\xCD\x7D"
+ "\x1D\x97\xF9\x11\xBD\xE0\x1F\x1F"
+ "\x96\x3E\x4B\x22\xF4\xC0\xE6\xB8"
+ "\x47\x82\x98\x23\x33\x36\xBC\x1B"
+ "\x36\xE7\xF6\xCF\x97\x37\x16\xC0"
+ "\x87\x31\x8B\xB0\xDB\x19\x42\xA5"
+ "\x1F\x90\x7E\x66\x34\xDD\x5E\xE9"
+ "\x4F\xB2\x2B\x9A\xDE\xB3\x5D\x71"
+ "\x4D\x68\xF0\xDC\xA6\xEA\xE3\x9B"
+ "\x60\x00\x55\x57\x06\x8B\xD5\xB3"
+ "\x86\x30\x78\xDA\x33\x9A\x9D\xCC"
+ "\xBA\x0B\x81\x06\x77\x43\xC7\xC9"
+ "\xDB\x37\x60\x11\x45\x59\x6D\x2D"
+ "\x90\x3D\x65\x3E\xD0\x13\xC6\x3C"
+ "\x0E\x78\x7D\x9A\x00\xD6\x2F\x0B"
+ "\x3B\x53\x19\x1E\xA8\x9B\x11\xD9"
+ "\x98\xE4\x7F\xC3\x6E\x51\x24\x70"
+ "\x9F\x04\x9C\xC2\x9E\x44\x84\xE3"
+ "\xE0\x8A\x44\xA2\x5C\x94\x74\x34"
+ "\x37\x52\x7C\x03\xE8\x8E\x97\xE1"
+ "\x5B\x5C\x0E\xB0\x70\xFE\x54\x3F"
+ "\xD8\x65\xA9\xC5\xCD\xEC\xF4\x45"
+ "\x55\xC5\xA7\xA3\x19\x80\x28\x51"
+ "\xBE\x64\x4A\xC1\xD4\xE1\xBE\xEB"
+ "\x73\x4C\xB6\xF9\x5F\x6D\x82\xBC"
+ "\x3E\x42\x14\x49\x88\x51\xBF\x68"
+ "\x45\x75\x27\x1B\x0A\x72\xED\xAF"
+ "\xDA\xC4\x4D\x67\x0D\xEE\x75\xE3"
+ "\x34\xDD\x91\x19\x42\x3A\xCB\xDA"
+ "\x38\xFA\x3C\x93\x62\xF2\xE3\x81"
+ "\xB3\xE4\xBB\xF6\x0D\x0B\x1D\x09"
+ "\x9C\x52\x0D\x50\x63\xA4\xB2\xD2"
+ "\x82\xA0\x23\x3F\x1F\xB6\xED\x6E"
+ "\xC2\x9C\x1C\xD0\x9A\x40\xB6\xFC"
+ "\x36\x56\x6E\x85\x73\xD7\x52\xBA"
+ "\x35\x5E\x32\x89\x5D\x42\xF5\x36"
+ "\x52\x8D\x46\x7D\xC8\x71\xAD\x33"
+ "\xE1\xAF\x6A\xA8\xEC\xBA\x1C\xDC"
+ "\xFE\x88\xE6\x16\xE4\xC8\x13\x00"
+ "\x3C\xDA\x59\x32\x38\x19\xD5\xEB"
+ "\xB6\x7F\x78\x45\x1B\x8E\x07\x8C"
+ "\x66\x52\x75\xFF\xAF\xCE\x2D\x2B"
+ "\x22\x29\xCA\xB3\x5F\x7F\xE3\x29"
+ "\xB2\xB8\x9D\xEB\x16\xC8\xC5\x1D"
+ "\xC9\x0D\x59\x82\x27\x57\x9D\x42"
+ "\x54\x59\x09\xA5\x3D\xC5\x84\x68"
+ "\x56\xEB\x36\x77\x3D\xAA\xB8\xF5"
+ "\xC9\x1A\xFB\x5D\xDE\xBB\x43\xF4",
+ .rlen = 504,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 504 - 8, 8 },
},
};
@@ -2361,6 +5239,143 @@ static struct cipher_testvec bf_dec_tv_template[] = {
.ilen = 8,
.result = "\xfe\xdc\xba\x98\x76\x54\x32\x10",
.rlen = 8,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .input = "\x96\x87\x3D\x0C\x7B\xFB\xBD\x1F"
+ "\xE3\xC1\x99\x6D\x39\xD4\xC2\x7D"
+ "\xD7\x87\xA1\xF2\xDF\x51\x71\x26"
+ "\xC2\xF4\x6D\xFF\xF6\xCD\x6B\x40"
+ "\xE1\xB3\xBF\xD4\x38\x2B\xC8\x3B"
+ "\xD3\xB2\xD4\x61\xC7\x9F\x06\xE9"
+ "\xCD\xF3\x88\x39\x39\x7A\xDF\x19"
+ "\xE8\x03\x2A\x0B\x9E\xA0\x2B\x86"
+ "\x31\xF8\x9D\xB1\xEE\x78\x9D\xB5"
+ "\xCD\x8B\x7C\x2E\xF5\xA2\x2D\x5D"
+ "\x6E\x66\xAF\x38\x6C\xD3\x13\xED"
+ "\x14\xEA\x5D\xD0\x17\x77\x0F\x4A"
+ "\x50\xF2\xD0\x0F\xC8\xF7\x1E\x7B"
+ "\x9D\x5B\x54\x65\x4F\x16\x8A\x97"
+ "\xF3\xF6\xD4\xAA\x87\x36\x77\x72"
+ "\x99\x4A\xB5\x5E\x88\xC3\xCD\x7D"
+ "\x1D\x97\xF9\x11\xBD\xE0\x1F\x1F"
+ "\x96\x3E\x4B\x22\xF4\xC0\xE6\xB8"
+ "\x47\x82\x98\x23\x33\x36\xBC\x1B"
+ "\x36\xE7\xF6\xCF\x97\x37\x16\xC0"
+ "\x87\x31\x8B\xB0\xDB\x19\x42\xA5"
+ "\x1F\x90\x7E\x66\x34\xDD\x5E\xE9"
+ "\x4F\xB2\x2B\x9A\xDE\xB3\x5D\x71"
+ "\x4D\x68\xF0\xDC\xA6\xEA\xE3\x9B"
+ "\x60\x00\x55\x57\x06\x8B\xD5\xB3"
+ "\x86\x30\x78\xDA\x33\x9A\x9D\xCC"
+ "\xBA\x0B\x81\x06\x77\x43\xC7\xC9"
+ "\xDB\x37\x60\x11\x45\x59\x6D\x2D"
+ "\x90\x3D\x65\x3E\xD0\x13\xC6\x3C"
+ "\x0E\x78\x7D\x9A\x00\xD6\x2F\x0B"
+ "\x3B\x53\x19\x1E\xA8\x9B\x11\xD9"
+ "\x98\xE4\x7F\xC3\x6E\x51\x24\x70"
+ "\x9F\x04\x9C\xC2\x9E\x44\x84\xE3"
+ "\xE0\x8A\x44\xA2\x5C\x94\x74\x34"
+ "\x37\x52\x7C\x03\xE8\x8E\x97\xE1"
+ "\x5B\x5C\x0E\xB0\x70\xFE\x54\x3F"
+ "\xD8\x65\xA9\xC5\xCD\xEC\xF4\x45"
+ "\x55\xC5\xA7\xA3\x19\x80\x28\x51"
+ "\xBE\x64\x4A\xC1\xD4\xE1\xBE\xEB"
+ "\x73\x4C\xB6\xF9\x5F\x6D\x82\xBC"
+ "\x3E\x42\x14\x49\x88\x51\xBF\x68"
+ "\x45\x75\x27\x1B\x0A\x72\xED\xAF"
+ "\xDA\xC4\x4D\x67\x0D\xEE\x75\xE3"
+ "\x34\xDD\x91\x19\x42\x3A\xCB\xDA"
+ "\x38\xFA\x3C\x93\x62\xF2\xE3\x81"
+ "\xB3\xE4\xBB\xF6\x0D\x0B\x1D\x09"
+ "\x9C\x52\x0D\x50\x63\xA4\xB2\xD2"
+ "\x82\xA0\x23\x3F\x1F\xB6\xED\x6E"
+ "\xC2\x9C\x1C\xD0\x9A\x40\xB6\xFC"
+ "\x36\x56\x6E\x85\x73\xD7\x52\xBA"
+ "\x35\x5E\x32\x89\x5D\x42\xF5\x36"
+ "\x52\x8D\x46\x7D\xC8\x71\xAD\x33"
+ "\xE1\xAF\x6A\xA8\xEC\xBA\x1C\xDC"
+ "\xFE\x88\xE6\x16\xE4\xC8\x13\x00"
+ "\x3C\xDA\x59\x32\x38\x19\xD5\xEB"
+ "\xB6\x7F\x78\x45\x1B\x8E\x07\x8C"
+ "\x66\x52\x75\xFF\xAF\xCE\x2D\x2B"
+ "\x22\x29\xCA\xB3\x5F\x7F\xE3\x29"
+ "\xB2\xB8\x9D\xEB\x16\xC8\xC5\x1D"
+ "\xC9\x0D\x59\x82\x27\x57\x9D\x42"
+ "\x54\x59\x09\xA5\x3D\xC5\x84\x68"
+ "\x56\xEB\x36\x77\x3D\xAA\xB8\xF5"
+ "\xC9\x1A\xFB\x5D\xDE\xBB\x43\xF4",
+ .ilen = 504,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06",
+ .rlen = 504,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 504 - 8, 8 },
},
};
@@ -2380,6 +5395,144 @@ static struct cipher_testvec bf_cbc_enc_tv_template[] = {
"\x58\xde\xb9\xe7\x15\x46\x16\xd9"
"\x59\xf1\x65\x2b\xd5\xff\x92\xcc",
.rlen = 32,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06",
+ .ilen = 504,
+ .result = "\xB4\xFE\xA5\xBB\x3D\x2C\x27\x06"
+ "\x06\x2B\x3A\x92\xB2\xF5\x5E\x62"
+ "\x84\xCD\xF7\x66\x7E\x41\x6C\x8E"
+ "\x1B\xD9\x02\xB6\x48\xB0\x87\x25"
+ "\x01\x9C\x93\x63\x51\x60\x82\xD2"
+ "\x4D\xE5\xC2\xB7\xAE\x60\xD8\xAD"
+ "\x9F\xAB\x6C\xFA\x20\x05\xDA\x6F"
+ "\x1F\xD1\xD8\x36\x0F\xB5\x16\x69"
+ "\x3C\xAF\xB3\x30\x18\x33\xE6\xB5"
+ "\x43\x29\x9D\x94\xF4\x2F\x0A\x65"
+ "\x40\xB2\xB2\xB2\x42\x89\xEE\x8A"
+ "\x60\xD3\x52\xA8\xED\x91\xDF\xE1"
+ "\x91\x73\x7C\x28\xA1\x14\xC3\x4C"
+ "\x82\x72\x4B\x7D\x7D\x32\xD5\x19"
+ "\xE8\xB8\x6B\x30\x21\x09\x0E\x27"
+ "\x10\x9D\x2D\x3A\x6A\x4B\x7B\xE6"
+ "\x8D\x4E\x02\x32\xFF\x7F\x8E\x13"
+ "\xB0\x96\xF4\xC2\xA1\x60\x8A\x69"
+ "\xEF\x0F\x86\xD0\x25\x13\x1A\x7C"
+ "\x6E\xF0\x41\xA3\xFB\xB3\xAB\x40"
+ "\x7D\x19\xA0\x11\x4F\x3E\x1D\x43"
+ "\x65\xFE\x15\x40\xD0\x62\x41\x02"
+ "\xEA\x0C\x7A\xC3\x84\xEE\xB0\xBE"
+ "\xBE\xC8\x57\x51\xCD\x4F\xAD\x5C"
+ "\xCC\x79\xBA\x0D\x85\x3A\xED\x6B"
+ "\xAC\x6B\xA3\x4D\xBC\xE8\x02\x6A"
+ "\xC2\x6D\xBD\x5E\x89\x95\x86\x43"
+ "\x2C\x17\x4B\xC6\x40\xA2\xBD\x24"
+ "\x04\xF0\x86\x08\x78\x18\x42\xE0"
+ "\x39\x1B\x22\x9E\x89\x4C\x04\x6B"
+ "\x65\xC5\xB6\x0E\xF6\x63\xFC\xD7"
+ "\xAE\x9E\x87\x13\xCC\xD3\x1A\xEC"
+ "\xF0\x51\xCC\x93\x68\xFC\xE9\x19"
+ "\x7C\x4E\x9B\xCC\x17\xAD\xD2\xFC"
+ "\x97\x18\x92\xFF\x15\x11\xCE\xED"
+ "\x04\x41\x05\xA3\x92\xFF\x3B\xE6"
+ "\xB6\x8C\x90\xC6\xCD\x15\xA0\x04"
+ "\x25\x8B\x5D\x5B\x5F\xDB\xAE\x68"
+ "\xEF\xB3\x61\x18\xDB\x83\x9B\x39"
+ "\xCA\x82\xD1\x88\xF0\xA2\x5C\x02"
+ "\x87\xBD\x8D\x8F\xBB\x62\xF0\x35"
+ "\x75\x6F\x06\x81\x0A\x97\x4D\xF0"
+ "\x43\x12\x73\x77\xDB\x91\x83\x5B"
+ "\xE7\x3A\xA6\x07\x7B\xBF\x2C\x50"
+ "\x94\xDE\x7B\x65\xDA\x1C\xF1\x9F"
+ "\x7E\x12\x40\xB2\x3E\x19\x23\xF1"
+ "\x7C\x1B\x5F\xA8\xF3\xAC\x63\x87"
+ "\xEB\x3E\x0C\xBE\xA3\x63\x97\x88"
+ "\x8D\x27\xC6\x2A\xF8\xF2\x67\x9A"
+ "\x0D\x14\x16\x2B\x6F\xCB\xD4\x76"
+ "\x14\x48\x2E\xDE\x2A\x44\x5E\x45"
+ "\xF1\x97\x82\xEF\xB7\xAE\xED\x3A"
+ "\xED\x73\xD3\x79\xF7\x38\x1D\xD0"
+ "\xC5\xF8\x69\x83\x28\x84\x87\x56"
+ "\x3F\xAE\x81\x04\x79\x1F\xD1\x09"
+ "\xC5\xE5\x05\x0D\x64\x16\xCE\x42"
+ "\xC5\xF8\xDB\x57\x89\x33\x22\xFC"
+ "\xB4\xD7\x94\xB9\xF3\xCC\x02\x90"
+ "\x02\xBA\x55\x1E\x24\x3E\x02\x1D"
+ "\xC6\xCD\x8F\xD9\xBD\xED\xB0\x51"
+ "\xCD\xE9\xD5\x0C\xFE\x12\x39\xA9"
+ "\x93\x9B\xEE\xB5\x97\x41\xD2\xA0"
+ "\xB4\x98\xD8\x6B\x74\xE7\x65\xF4",
+ .rlen = 504,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 504 - 8, 8 },
},
};
@@ -2399,16 +5552,984 @@ static struct cipher_testvec bf_cbc_dec_tv_template[] = {
"\x68\x65\x20\x74\x69\x6d\x65\x20"
"\x66\x6f\x72\x20\x00\x00\x00\x00",
.rlen = 32,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\xB4\xFE\xA5\xBB\x3D\x2C\x27\x06"
+ "\x06\x2B\x3A\x92\xB2\xF5\x5E\x62"
+ "\x84\xCD\xF7\x66\x7E\x41\x6C\x8E"
+ "\x1B\xD9\x02\xB6\x48\xB0\x87\x25"
+ "\x01\x9C\x93\x63\x51\x60\x82\xD2"
+ "\x4D\xE5\xC2\xB7\xAE\x60\xD8\xAD"
+ "\x9F\xAB\x6C\xFA\x20\x05\xDA\x6F"
+ "\x1F\xD1\xD8\x36\x0F\xB5\x16\x69"
+ "\x3C\xAF\xB3\x30\x18\x33\xE6\xB5"
+ "\x43\x29\x9D\x94\xF4\x2F\x0A\x65"
+ "\x40\xB2\xB2\xB2\x42\x89\xEE\x8A"
+ "\x60\xD3\x52\xA8\xED\x91\xDF\xE1"
+ "\x91\x73\x7C\x28\xA1\x14\xC3\x4C"
+ "\x82\x72\x4B\x7D\x7D\x32\xD5\x19"
+ "\xE8\xB8\x6B\x30\x21\x09\x0E\x27"
+ "\x10\x9D\x2D\x3A\x6A\x4B\x7B\xE6"
+ "\x8D\x4E\x02\x32\xFF\x7F\x8E\x13"
+ "\xB0\x96\xF4\xC2\xA1\x60\x8A\x69"
+ "\xEF\x0F\x86\xD0\x25\x13\x1A\x7C"
+ "\x6E\xF0\x41\xA3\xFB\xB3\xAB\x40"
+ "\x7D\x19\xA0\x11\x4F\x3E\x1D\x43"
+ "\x65\xFE\x15\x40\xD0\x62\x41\x02"
+ "\xEA\x0C\x7A\xC3\x84\xEE\xB0\xBE"
+ "\xBE\xC8\x57\x51\xCD\x4F\xAD\x5C"
+ "\xCC\x79\xBA\x0D\x85\x3A\xED\x6B"
+ "\xAC\x6B\xA3\x4D\xBC\xE8\x02\x6A"
+ "\xC2\x6D\xBD\x5E\x89\x95\x86\x43"
+ "\x2C\x17\x4B\xC6\x40\xA2\xBD\x24"
+ "\x04\xF0\x86\x08\x78\x18\x42\xE0"
+ "\x39\x1B\x22\x9E\x89\x4C\x04\x6B"
+ "\x65\xC5\xB6\x0E\xF6\x63\xFC\xD7"
+ "\xAE\x9E\x87\x13\xCC\xD3\x1A\xEC"
+ "\xF0\x51\xCC\x93\x68\xFC\xE9\x19"
+ "\x7C\x4E\x9B\xCC\x17\xAD\xD2\xFC"
+ "\x97\x18\x92\xFF\x15\x11\xCE\xED"
+ "\x04\x41\x05\xA3\x92\xFF\x3B\xE6"
+ "\xB6\x8C\x90\xC6\xCD\x15\xA0\x04"
+ "\x25\x8B\x5D\x5B\x5F\xDB\xAE\x68"
+ "\xEF\xB3\x61\x18\xDB\x83\x9B\x39"
+ "\xCA\x82\xD1\x88\xF0\xA2\x5C\x02"
+ "\x87\xBD\x8D\x8F\xBB\x62\xF0\x35"
+ "\x75\x6F\x06\x81\x0A\x97\x4D\xF0"
+ "\x43\x12\x73\x77\xDB\x91\x83\x5B"
+ "\xE7\x3A\xA6\x07\x7B\xBF\x2C\x50"
+ "\x94\xDE\x7B\x65\xDA\x1C\xF1\x9F"
+ "\x7E\x12\x40\xB2\x3E\x19\x23\xF1"
+ "\x7C\x1B\x5F\xA8\xF3\xAC\x63\x87"
+ "\xEB\x3E\x0C\xBE\xA3\x63\x97\x88"
+ "\x8D\x27\xC6\x2A\xF8\xF2\x67\x9A"
+ "\x0D\x14\x16\x2B\x6F\xCB\xD4\x76"
+ "\x14\x48\x2E\xDE\x2A\x44\x5E\x45"
+ "\xF1\x97\x82\xEF\xB7\xAE\xED\x3A"
+ "\xED\x73\xD3\x79\xF7\x38\x1D\xD0"
+ "\xC5\xF8\x69\x83\x28\x84\x87\x56"
+ "\x3F\xAE\x81\x04\x79\x1F\xD1\x09"
+ "\xC5\xE5\x05\x0D\x64\x16\xCE\x42"
+ "\xC5\xF8\xDB\x57\x89\x33\x22\xFC"
+ "\xB4\xD7\x94\xB9\xF3\xCC\x02\x90"
+ "\x02\xBA\x55\x1E\x24\x3E\x02\x1D"
+ "\xC6\xCD\x8F\xD9\xBD\xED\xB0\x51"
+ "\xCD\xE9\xD5\x0C\xFE\x12\x39\xA9"
+ "\x93\x9B\xEE\xB5\x97\x41\xD2\xA0"
+ "\xB4\x98\xD8\x6B\x74\xE7\x65\xF4",
+ .ilen = 504,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06",
+ .rlen = 504,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 504 - 8, 8 },
+ },
+};
+
+static struct cipher_testvec bf_ctr_enc_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06",
+ .ilen = 504,
+ .result = "\xC7\xA3\xDF\xB9\x05\xF4\x9E\x8D"
+ "\x9E\xDF\x38\x18\x83\x07\xEF\xC1"
+ "\x93\x3C\xAA\xAA\xFE\x06\x42\xCC"
+ "\x0D\x70\x86\x5A\x44\xAD\x85\x17"
+ "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC"
+ "\x3D\xA7\xE9\x0A\x5C\x70\x4D\xDE"
+ "\x99\x38\x07\xCA\x1D\x21\xC1\x11"
+ "\x97\xEB\x98\x75\xC4\x73\x45\x83"
+ "\x46\x1C\x9C\x91\x87\xC1\xA0\x56"
+ "\x98\xA1\x8B\xDB\x22\x76\xBD\x62"
+ "\xA4\xBC\xE8\x86\xDA\xD2\x51\x13"
+ "\x13\xD2\x96\x68\x69\x10\x67\x0C"
+ "\xD0\x17\x25\x7C\xB2\xAE\x4F\x93"
+ "\xA6\x82\x20\xCF\x0F\xA6\x47\x79"
+ "\x88\x09\x40\x59\xBD\x12\x64\xB5"
+ "\x19\x38\x0D\xFF\x86\xD9\x42\x20"
+ "\x81\x0D\x96\x99\xAF\x22\x1F\x94"
+ "\x5C\x6E\xEC\xEA\xA3\x39\xCB\x09"
+ "\x43\x19\x7F\xD0\xBB\x10\xC2\x49"
+ "\xF7\xE9\xF2\xEE\xBF\xF7\xF8\xB3"
+ "\x0E\x1A\xF1\x8D\x70\x82\x0C\x04"
+ "\xFD\x29\x1A\xAC\xC0\x92\x48\x34"
+ "\x6A\xE3\x1D\x4F\xFC\x1C\x72\x6A"
+ "\x57\xCB\xAD\xD0\x98\xAB\xB1\x01"
+ "\x03\x6A\x45\xDD\x07\x71\x5F\x5B"
+ "\xB5\x4A\xE4\xE5\xB9\xB9\xBC\xAC"
+ "\x44\xF7\x41\xA4\x5F\x2E\xE9\x28"
+ "\xE3\x05\xD2\x94\x78\x4C\x33\x1B"
+ "\xBD\xC1\x6E\x51\xD9\xAD\xD9\x86"
+ "\x15\x4A\x78\xAE\x7B\xAD\x3B\xBC"
+ "\x2F\xE0\x0E\xC5\x7B\x54\x97\x5F"
+ "\x60\x51\x14\x65\xF9\x91\xE9\xDA"
+ "\x9A\xBC\xFC\x19\x29\x67\xAA\x63"
+ "\x5E\xF2\x48\x88\xEB\x79\xE1\xE4"
+ "\xF7\xF6\x4C\xA9\xE2\x8C\x3B\xE0"
+ "\xED\x52\xAE\x90\x8F\x5B\x98\x34"
+ "\x29\x94\x34\x7F\xF9\x6C\x1E\xB6"
+ "\xA4\xE7\x2D\x06\x54\x9D\xC3\x02"
+ "\xC1\x90\xA4\x72\x31\x6B\x24\x51"
+ "\x0B\xB3\x7C\x63\x15\xBA\xAF\x5D"
+ "\x41\xE0\x37\x6D\xBE\x41\x58\xDE"
+ "\xF2\x07\x62\x99\xBE\xC1\x8C\x0F"
+ "\x0F\x28\xFB\x8F\x0E\x1D\x91\xE2"
+ "\xDA\x99\x5C\x49\xBA\x9C\xA8\x86"
+ "\x82\x63\x11\xB3\x54\x49\x00\x08"
+ "\x07\xF2\xE8\x1F\x34\x49\x61\xF4"
+ "\x81\xE9\xF6\xA9\x5A\x28\x60\x1F"
+ "\x66\x99\x08\x06\xF2\xE8\x2D\xD1"
+ "\xD0\x67\xBA\x32\x1F\x02\x86\x7B"
+ "\xFB\x79\x3D\xC5\xB1\x7F\x15\xAF"
+ "\xD7\xBF\x31\x46\x22\x7F\xAE\x5B"
+ "\x8B\x95\x47\xC2\xB1\x62\xA1\xCE"
+ "\x52\xAC\x9C\x8B\xC2\x49\x7F\xBC"
+ "\x9C\x89\xB8\xB6\xCA\xE3\x8F\xEA"
+ "\xAC\xB4\x5D\xE4\x50\xDC\x3A\xB5"
+ "\x91\x04\x94\x99\x03\x3B\x42\x6D"
+ "\x9C\x4A\x02\xF5\xB5\x38\x98\xA8"
+ "\x5C\x97\x2E\x4D\x79\x67\x71\xAF"
+ "\xF0\x70\x77\xFF\x2D\xDA\xA0\x9E"
+ "\x23\x8D\xD6\xA6\x68\x10\x78\x9A"
+ "\x64\xBB\x15\xB8\x56\xCF\xEE\xE5"
+ "\x32\x44\x96\x1C\xD8\xEB\x95\xD2"
+ "\xF3\x71\xEF\xEB\x4E\xBB\x4D\x29",
+ .rlen = 504,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92",
+ .ilen = 503,
+ .result = "\xC7\xA3\xDF\xB9\x05\xF4\x9E\x8D"
+ "\x9E\xDF\x38\x18\x83\x07\xEF\xC1"
+ "\x93\x3C\xAA\xAA\xFE\x06\x42\xCC"
+ "\x0D\x70\x86\x5A\x44\xAD\x85\x17"
+ "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC"
+ "\x3D\xA7\xE9\x0A\x5C\x70\x4D\xDE"
+ "\x99\x38\x07\xCA\x1D\x21\xC1\x11"
+ "\x97\xEB\x98\x75\xC4\x73\x45\x83"
+ "\x46\x1C\x9C\x91\x87\xC1\xA0\x56"
+ "\x98\xA1\x8B\xDB\x22\x76\xBD\x62"
+ "\xA4\xBC\xE8\x86\xDA\xD2\x51\x13"
+ "\x13\xD2\x96\x68\x69\x10\x67\x0C"
+ "\xD0\x17\x25\x7C\xB2\xAE\x4F\x93"
+ "\xA6\x82\x20\xCF\x0F\xA6\x47\x79"
+ "\x88\x09\x40\x59\xBD\x12\x64\xB5"
+ "\x19\x38\x0D\xFF\x86\xD9\x42\x20"
+ "\x81\x0D\x96\x99\xAF\x22\x1F\x94"
+ "\x5C\x6E\xEC\xEA\xA3\x39\xCB\x09"
+ "\x43\x19\x7F\xD0\xBB\x10\xC2\x49"
+ "\xF7\xE9\xF2\xEE\xBF\xF7\xF8\xB3"
+ "\x0E\x1A\xF1\x8D\x70\x82\x0C\x04"
+ "\xFD\x29\x1A\xAC\xC0\x92\x48\x34"
+ "\x6A\xE3\x1D\x4F\xFC\x1C\x72\x6A"
+ "\x57\xCB\xAD\xD0\x98\xAB\xB1\x01"
+ "\x03\x6A\x45\xDD\x07\x71\x5F\x5B"
+ "\xB5\x4A\xE4\xE5\xB9\xB9\xBC\xAC"
+ "\x44\xF7\x41\xA4\x5F\x2E\xE9\x28"
+ "\xE3\x05\xD2\x94\x78\x4C\x33\x1B"
+ "\xBD\xC1\x6E\x51\xD9\xAD\xD9\x86"
+ "\x15\x4A\x78\xAE\x7B\xAD\x3B\xBC"
+ "\x2F\xE0\x0E\xC5\x7B\x54\x97\x5F"
+ "\x60\x51\x14\x65\xF9\x91\xE9\xDA"
+ "\x9A\xBC\xFC\x19\x29\x67\xAA\x63"
+ "\x5E\xF2\x48\x88\xEB\x79\xE1\xE4"
+ "\xF7\xF6\x4C\xA9\xE2\x8C\x3B\xE0"
+ "\xED\x52\xAE\x90\x8F\x5B\x98\x34"
+ "\x29\x94\x34\x7F\xF9\x6C\x1E\xB6"
+ "\xA4\xE7\x2D\x06\x54\x9D\xC3\x02"
+ "\xC1\x90\xA4\x72\x31\x6B\x24\x51"
+ "\x0B\xB3\x7C\x63\x15\xBA\xAF\x5D"
+ "\x41\xE0\x37\x6D\xBE\x41\x58\xDE"
+ "\xF2\x07\x62\x99\xBE\xC1\x8C\x0F"
+ "\x0F\x28\xFB\x8F\x0E\x1D\x91\xE2"
+ "\xDA\x99\x5C\x49\xBA\x9C\xA8\x86"
+ "\x82\x63\x11\xB3\x54\x49\x00\x08"
+ "\x07\xF2\xE8\x1F\x34\x49\x61\xF4"
+ "\x81\xE9\xF6\xA9\x5A\x28\x60\x1F"
+ "\x66\x99\x08\x06\xF2\xE8\x2D\xD1"
+ "\xD0\x67\xBA\x32\x1F\x02\x86\x7B"
+ "\xFB\x79\x3D\xC5\xB1\x7F\x15\xAF"
+ "\xD7\xBF\x31\x46\x22\x7F\xAE\x5B"
+ "\x8B\x95\x47\xC2\xB1\x62\xA1\xCE"
+ "\x52\xAC\x9C\x8B\xC2\x49\x7F\xBC"
+ "\x9C\x89\xB8\xB6\xCA\xE3\x8F\xEA"
+ "\xAC\xB4\x5D\xE4\x50\xDC\x3A\xB5"
+ "\x91\x04\x94\x99\x03\x3B\x42\x6D"
+ "\x9C\x4A\x02\xF5\xB5\x38\x98\xA8"
+ "\x5C\x97\x2E\x4D\x79\x67\x71\xAF"
+ "\xF0\x70\x77\xFF\x2D\xDA\xA0\x9E"
+ "\x23\x8D\xD6\xA6\x68\x10\x78\x9A"
+ "\x64\xBB\x15\xB8\x56\xCF\xEE\xE5"
+ "\x32\x44\x96\x1C\xD8\xEB\x95\xD2"
+ "\xF3\x71\xEF\xEB\x4E\xBB\x4D",
+ .rlen = 503,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 503 - 8, 8 },
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06",
+ .ilen = 504,
+ .result = "\x5F\x58\x6E\x60\x51\x6E\xDC\x3D"
+ "\xD1\xBB\xF7\xB7\xFD\x04\x44\x82"
+ "\xDC\x9F\x4B\x02\xF1\xD2\x5A\x6F"
+ "\x25\xF9\x27\x21\xF2\xD2\x9A\x01"
+ "\xBD\xAD\x3D\x93\x87\xCA\x0D\xFE"
+ "\xB7\x2C\x17\x1F\x42\x8C\x13\xB2"
+ "\x62\x44\x72\xB9\x5D\xC0\xF8\x37"
+ "\xDF\xEA\x78\x81\x8F\xA6\x34\xB2"
+ "\x07\x09\x7C\xB9\x3A\xA0\x2B\x18"
+ "\x34\x6A\x9D\x3D\xA5\xEB\xF4\x60"
+ "\xF8\x98\xA2\x39\x81\x23\x6C\xA9"
+ "\x70\xCA\xCC\x45\xD8\x1F\xDF\x44"
+ "\x2A\x67\x7A\x88\x28\xDC\x36\x83"
+ "\x18\xD7\x48\x43\x17\x2B\x1B\xE6"
+ "\x0B\x82\x59\x14\x26\x67\x08\x09"
+ "\x5B\x5D\x38\xD0\x81\xCE\x54\x2A"
+ "\xCD\x22\x94\x42\xF5\xBA\x74\x7E"
+ "\xD9\x00\x40\xA9\x0D\x0B\xBD\x8E"
+ "\xC4\x8E\x5E\x17\x8F\x48\xE2\xB8"
+ "\xF4\xCC\x19\x76\xAB\x48\x29\xAA"
+ "\x81\xD5\xCE\xD5\x8A\x3B\xC9\x21"
+ "\xEF\x50\x4F\x04\x02\xBF\xE1\x1F"
+ "\x59\x28\x1A\xE4\x18\x16\xA0\x29"
+ "\xBF\x34\xA9\x2D\x28\x83\xC0\x5E"
+ "\xEA\x44\xC4\x6E\xAB\x24\x79\x9D"
+ "\x2D\xA1\xE8\x55\xCA\x74\xFC\xBD"
+ "\xFE\xDD\xDA\xA5\xFB\x34\x90\x31"
+ "\x0E\x62\x28\x9B\xDC\xD7\xA1\xBB"
+ "\xF0\x1A\xB3\xE2\xD0\xFA\xBD\xE8"
+ "\x5C\x5A\x10\x67\xF6\x6A\x17\x3F"
+ "\xC5\xE9\x09\x08\xDD\x22\x77\x42"
+ "\x26\x6A\x6A\x7A\x3F\x87\x80\x0C"
+ "\xF0\xFF\x15\x8E\x84\x86\xC0\x10"
+ "\x0F\x8D\x33\x06\xB8\x72\xA4\x47"
+ "\x6B\xED\x2E\x05\x94\x6C\x5C\x5B"
+ "\x13\xF6\x77\xEE\x3B\x16\xDF\xC2"
+ "\x63\x66\x07\x6D\x3F\x6C\x51\x7C"
+ "\x1C\xAC\x80\xB6\x58\x48\xB7\x9D"
+ "\xB4\x19\xD8\x19\x45\x66\x27\x02"
+ "\xA1\xA9\x99\xF3\x1F\xE5\xA7\x1D"
+ "\x31\xE7\x1B\x0D\xFF\xBB\xB5\xA1"
+ "\xF5\x9C\x45\x1E\x18\x19\xA1\xE7"
+ "\xC2\xF1\xBF\x68\xC3\xEC\xCF\x53"
+ "\x67\xA6\x2B\x7D\x3C\x6D\x24\xC3"
+ "\xE8\xE6\x07\x5A\x09\xE0\x32\xA8"
+ "\x52\xF6\xE9\xED\x0E\xC6\x0A\x6A"
+ "\xFC\x60\x2A\xE0\x93\xCE\xB8\x2E"
+ "\xA2\xA8\x0E\x79\x9E\x34\x5D\x37"
+ "\x6F\x12\xFE\x48\x7B\xE7\xB9\x22"
+ "\x29\xE8\xD7\xBE\x5D\xD1\x8B\xD9"
+ "\x91\x51\x4E\x71\xF2\x98\x85\x16"
+ "\x25\x7A\x76\x8A\x51\x0E\x65\x14"
+ "\x81\xB5\x3A\x37\xFD\xEC\xB5\x8A"
+ "\xE1\xCF\x41\x72\x14\x29\x4C\xF0"
+ "\x20\xD9\x9A\xC5\x66\xA4\x03\x76"
+ "\x5B\xA4\x15\x4F\x0E\x64\x39\x40"
+ "\x25\xF9\x20\x22\xF5\x88\xF5\xBA"
+ "\xE4\xDF\x45\x61\xBF\x8D\x7A\x24"
+ "\x4B\x92\x71\xD9\x2F\x77\xA7\x95"
+ "\xA8\x7F\x61\xD5\xA4\x57\xB0\xFB"
+ "\xB5\x77\xBA\x1C\xEE\x71\xFA\xB0"
+ "\x16\x4C\x18\x6B\xF2\x69\xA0\x07"
+ "\xEF\xBE\xEC\x69\xAC\xA8\x63\x9E",
+ .rlen = 504,
+ },
+};
+
+static struct cipher_testvec bf_ctr_dec_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\xC7\xA3\xDF\xB9\x05\xF4\x9E\x8D"
+ "\x9E\xDF\x38\x18\x83\x07\xEF\xC1"
+ "\x93\x3C\xAA\xAA\xFE\x06\x42\xCC"
+ "\x0D\x70\x86\x5A\x44\xAD\x85\x17"
+ "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC"
+ "\x3D\xA7\xE9\x0A\x5C\x70\x4D\xDE"
+ "\x99\x38\x07\xCA\x1D\x21\xC1\x11"
+ "\x97\xEB\x98\x75\xC4\x73\x45\x83"
+ "\x46\x1C\x9C\x91\x87\xC1\xA0\x56"
+ "\x98\xA1\x8B\xDB\x22\x76\xBD\x62"
+ "\xA4\xBC\xE8\x86\xDA\xD2\x51\x13"
+ "\x13\xD2\x96\x68\x69\x10\x67\x0C"
+ "\xD0\x17\x25\x7C\xB2\xAE\x4F\x93"
+ "\xA6\x82\x20\xCF\x0F\xA6\x47\x79"
+ "\x88\x09\x40\x59\xBD\x12\x64\xB5"
+ "\x19\x38\x0D\xFF\x86\xD9\x42\x20"
+ "\x81\x0D\x96\x99\xAF\x22\x1F\x94"
+ "\x5C\x6E\xEC\xEA\xA3\x39\xCB\x09"
+ "\x43\x19\x7F\xD0\xBB\x10\xC2\x49"
+ "\xF7\xE9\xF2\xEE\xBF\xF7\xF8\xB3"
+ "\x0E\x1A\xF1\x8D\x70\x82\x0C\x04"
+ "\xFD\x29\x1A\xAC\xC0\x92\x48\x34"
+ "\x6A\xE3\x1D\x4F\xFC\x1C\x72\x6A"
+ "\x57\xCB\xAD\xD0\x98\xAB\xB1\x01"
+ "\x03\x6A\x45\xDD\x07\x71\x5F\x5B"
+ "\xB5\x4A\xE4\xE5\xB9\xB9\xBC\xAC"
+ "\x44\xF7\x41\xA4\x5F\x2E\xE9\x28"
+ "\xE3\x05\xD2\x94\x78\x4C\x33\x1B"
+ "\xBD\xC1\x6E\x51\xD9\xAD\xD9\x86"
+ "\x15\x4A\x78\xAE\x7B\xAD\x3B\xBC"
+ "\x2F\xE0\x0E\xC5\x7B\x54\x97\x5F"
+ "\x60\x51\x14\x65\xF9\x91\xE9\xDA"
+ "\x9A\xBC\xFC\x19\x29\x67\xAA\x63"
+ "\x5E\xF2\x48\x88\xEB\x79\xE1\xE4"
+ "\xF7\xF6\x4C\xA9\xE2\x8C\x3B\xE0"
+ "\xED\x52\xAE\x90\x8F\x5B\x98\x34"
+ "\x29\x94\x34\x7F\xF9\x6C\x1E\xB6"
+ "\xA4\xE7\x2D\x06\x54\x9D\xC3\x02"
+ "\xC1\x90\xA4\x72\x31\x6B\x24\x51"
+ "\x0B\xB3\x7C\x63\x15\xBA\xAF\x5D"
+ "\x41\xE0\x37\x6D\xBE\x41\x58\xDE"
+ "\xF2\x07\x62\x99\xBE\xC1\x8C\x0F"
+ "\x0F\x28\xFB\x8F\x0E\x1D\x91\xE2"
+ "\xDA\x99\x5C\x49\xBA\x9C\xA8\x86"
+ "\x82\x63\x11\xB3\x54\x49\x00\x08"
+ "\x07\xF2\xE8\x1F\x34\x49\x61\xF4"
+ "\x81\xE9\xF6\xA9\x5A\x28\x60\x1F"
+ "\x66\x99\x08\x06\xF2\xE8\x2D\xD1"
+ "\xD0\x67\xBA\x32\x1F\x02\x86\x7B"
+ "\xFB\x79\x3D\xC5\xB1\x7F\x15\xAF"
+ "\xD7\xBF\x31\x46\x22\x7F\xAE\x5B"
+ "\x8B\x95\x47\xC2\xB1\x62\xA1\xCE"
+ "\x52\xAC\x9C\x8B\xC2\x49\x7F\xBC"
+ "\x9C\x89\xB8\xB6\xCA\xE3\x8F\xEA"
+ "\xAC\xB4\x5D\xE4\x50\xDC\x3A\xB5"
+ "\x91\x04\x94\x99\x03\x3B\x42\x6D"
+ "\x9C\x4A\x02\xF5\xB5\x38\x98\xA8"
+ "\x5C\x97\x2E\x4D\x79\x67\x71\xAF"
+ "\xF0\x70\x77\xFF\x2D\xDA\xA0\x9E"
+ "\x23\x8D\xD6\xA6\x68\x10\x78\x9A"
+ "\x64\xBB\x15\xB8\x56\xCF\xEE\xE5"
+ "\x32\x44\x96\x1C\xD8\xEB\x95\xD2"
+ "\xF3\x71\xEF\xEB\x4E\xBB\x4D\x29",
+ .ilen = 504,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06",
+ .rlen = 504,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\xC7\xA3\xDF\xB9\x05\xF4\x9E\x8D"
+ "\x9E\xDF\x38\x18\x83\x07\xEF\xC1"
+ "\x93\x3C\xAA\xAA\xFE\x06\x42\xCC"
+ "\x0D\x70\x86\x5A\x44\xAD\x85\x17"
+ "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC"
+ "\x3D\xA7\xE9\x0A\x5C\x70\x4D\xDE"
+ "\x99\x38\x07\xCA\x1D\x21\xC1\x11"
+ "\x97\xEB\x98\x75\xC4\x73\x45\x83"
+ "\x46\x1C\x9C\x91\x87\xC1\xA0\x56"
+ "\x98\xA1\x8B\xDB\x22\x76\xBD\x62"
+ "\xA4\xBC\xE8\x86\xDA\xD2\x51\x13"
+ "\x13\xD2\x96\x68\x69\x10\x67\x0C"
+ "\xD0\x17\x25\x7C\xB2\xAE\x4F\x93"
+ "\xA6\x82\x20\xCF\x0F\xA6\x47\x79"
+ "\x88\x09\x40\x59\xBD\x12\x64\xB5"
+ "\x19\x38\x0D\xFF\x86\xD9\x42\x20"
+ "\x81\x0D\x96\x99\xAF\x22\x1F\x94"
+ "\x5C\x6E\xEC\xEA\xA3\x39\xCB\x09"
+ "\x43\x19\x7F\xD0\xBB\x10\xC2\x49"
+ "\xF7\xE9\xF2\xEE\xBF\xF7\xF8\xB3"
+ "\x0E\x1A\xF1\x8D\x70\x82\x0C\x04"
+ "\xFD\x29\x1A\xAC\xC0\x92\x48\x34"
+ "\x6A\xE3\x1D\x4F\xFC\x1C\x72\x6A"
+ "\x57\xCB\xAD\xD0\x98\xAB\xB1\x01"
+ "\x03\x6A\x45\xDD\x07\x71\x5F\x5B"
+ "\xB5\x4A\xE4\xE5\xB9\xB9\xBC\xAC"
+ "\x44\xF7\x41\xA4\x5F\x2E\xE9\x28"
+ "\xE3\x05\xD2\x94\x78\x4C\x33\x1B"
+ "\xBD\xC1\x6E\x51\xD9\xAD\xD9\x86"
+ "\x15\x4A\x78\xAE\x7B\xAD\x3B\xBC"
+ "\x2F\xE0\x0E\xC5\x7B\x54\x97\x5F"
+ "\x60\x51\x14\x65\xF9\x91\xE9\xDA"
+ "\x9A\xBC\xFC\x19\x29\x67\xAA\x63"
+ "\x5E\xF2\x48\x88\xEB\x79\xE1\xE4"
+ "\xF7\xF6\x4C\xA9\xE2\x8C\x3B\xE0"
+ "\xED\x52\xAE\x90\x8F\x5B\x98\x34"
+ "\x29\x94\x34\x7F\xF9\x6C\x1E\xB6"
+ "\xA4\xE7\x2D\x06\x54\x9D\xC3\x02"
+ "\xC1\x90\xA4\x72\x31\x6B\x24\x51"
+ "\x0B\xB3\x7C\x63\x15\xBA\xAF\x5D"
+ "\x41\xE0\x37\x6D\xBE\x41\x58\xDE"
+ "\xF2\x07\x62\x99\xBE\xC1\x8C\x0F"
+ "\x0F\x28\xFB\x8F\x0E\x1D\x91\xE2"
+ "\xDA\x99\x5C\x49\xBA\x9C\xA8\x86"
+ "\x82\x63\x11\xB3\x54\x49\x00\x08"
+ "\x07\xF2\xE8\x1F\x34\x49\x61\xF4"
+ "\x81\xE9\xF6\xA9\x5A\x28\x60\x1F"
+ "\x66\x99\x08\x06\xF2\xE8\x2D\xD1"
+ "\xD0\x67\xBA\x32\x1F\x02\x86\x7B"
+ "\xFB\x79\x3D\xC5\xB1\x7F\x15\xAF"
+ "\xD7\xBF\x31\x46\x22\x7F\xAE\x5B"
+ "\x8B\x95\x47\xC2\xB1\x62\xA1\xCE"
+ "\x52\xAC\x9C\x8B\xC2\x49\x7F\xBC"
+ "\x9C\x89\xB8\xB6\xCA\xE3\x8F\xEA"
+ "\xAC\xB4\x5D\xE4\x50\xDC\x3A\xB5"
+ "\x91\x04\x94\x99\x03\x3B\x42\x6D"
+ "\x9C\x4A\x02\xF5\xB5\x38\x98\xA8"
+ "\x5C\x97\x2E\x4D\x79\x67\x71\xAF"
+ "\xF0\x70\x77\xFF\x2D\xDA\xA0\x9E"
+ "\x23\x8D\xD6\xA6\x68\x10\x78\x9A"
+ "\x64\xBB\x15\xB8\x56\xCF\xEE\xE5"
+ "\x32\x44\x96\x1C\xD8\xEB\x95\xD2"
+ "\xF3\x71\xEF\xEB\x4E\xBB\x4D",
+ .ilen = 503,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92",
+ .rlen = 503,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 503 - 8, 8 },
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x5F\x58\x6E\x60\x51\x6E\xDC\x3D"
+ "\xD1\xBB\xF7\xB7\xFD\x04\x44\x82"
+ "\xDC\x9F\x4B\x02\xF1\xD2\x5A\x6F"
+ "\x25\xF9\x27\x21\xF2\xD2\x9A\x01"
+ "\xBD\xAD\x3D\x93\x87\xCA\x0D\xFE"
+ "\xB7\x2C\x17\x1F\x42\x8C\x13\xB2"
+ "\x62\x44\x72\xB9\x5D\xC0\xF8\x37"
+ "\xDF\xEA\x78\x81\x8F\xA6\x34\xB2"
+ "\x07\x09\x7C\xB9\x3A\xA0\x2B\x18"
+ "\x34\x6A\x9D\x3D\xA5\xEB\xF4\x60"
+ "\xF8\x98\xA2\x39\x81\x23\x6C\xA9"
+ "\x70\xCA\xCC\x45\xD8\x1F\xDF\x44"
+ "\x2A\x67\x7A\x88\x28\xDC\x36\x83"
+ "\x18\xD7\x48\x43\x17\x2B\x1B\xE6"
+ "\x0B\x82\x59\x14\x26\x67\x08\x09"
+ "\x5B\x5D\x38\xD0\x81\xCE\x54\x2A"
+ "\xCD\x22\x94\x42\xF5\xBA\x74\x7E"
+ "\xD9\x00\x40\xA9\x0D\x0B\xBD\x8E"
+ "\xC4\x8E\x5E\x17\x8F\x48\xE2\xB8"
+ "\xF4\xCC\x19\x76\xAB\x48\x29\xAA"
+ "\x81\xD5\xCE\xD5\x8A\x3B\xC9\x21"
+ "\xEF\x50\x4F\x04\x02\xBF\xE1\x1F"
+ "\x59\x28\x1A\xE4\x18\x16\xA0\x29"
+ "\xBF\x34\xA9\x2D\x28\x83\xC0\x5E"
+ "\xEA\x44\xC4\x6E\xAB\x24\x79\x9D"
+ "\x2D\xA1\xE8\x55\xCA\x74\xFC\xBD"
+ "\xFE\xDD\xDA\xA5\xFB\x34\x90\x31"
+ "\x0E\x62\x28\x9B\xDC\xD7\xA1\xBB"
+ "\xF0\x1A\xB3\xE2\xD0\xFA\xBD\xE8"
+ "\x5C\x5A\x10\x67\xF6\x6A\x17\x3F"
+ "\xC5\xE9\x09\x08\xDD\x22\x77\x42"
+ "\x26\x6A\x6A\x7A\x3F\x87\x80\x0C"
+ "\xF0\xFF\x15\x8E\x84\x86\xC0\x10"
+ "\x0F\x8D\x33\x06\xB8\x72\xA4\x47"
+ "\x6B\xED\x2E\x05\x94\x6C\x5C\x5B"
+ "\x13\xF6\x77\xEE\x3B\x16\xDF\xC2"
+ "\x63\x66\x07\x6D\x3F\x6C\x51\x7C"
+ "\x1C\xAC\x80\xB6\x58\x48\xB7\x9D"
+ "\xB4\x19\xD8\x19\x45\x66\x27\x02"
+ "\xA1\xA9\x99\xF3\x1F\xE5\xA7\x1D"
+ "\x31\xE7\x1B\x0D\xFF\xBB\xB5\xA1"
+ "\xF5\x9C\x45\x1E\x18\x19\xA1\xE7"
+ "\xC2\xF1\xBF\x68\xC3\xEC\xCF\x53"
+ "\x67\xA6\x2B\x7D\x3C\x6D\x24\xC3"
+ "\xE8\xE6\x07\x5A\x09\xE0\x32\xA8"
+ "\x52\xF6\xE9\xED\x0E\xC6\x0A\x6A"
+ "\xFC\x60\x2A\xE0\x93\xCE\xB8\x2E"
+ "\xA2\xA8\x0E\x79\x9E\x34\x5D\x37"
+ "\x6F\x12\xFE\x48\x7B\xE7\xB9\x22"
+ "\x29\xE8\xD7\xBE\x5D\xD1\x8B\xD9"
+ "\x91\x51\x4E\x71\xF2\x98\x85\x16"
+ "\x25\x7A\x76\x8A\x51\x0E\x65\x14"
+ "\x81\xB5\x3A\x37\xFD\xEC\xB5\x8A"
+ "\xE1\xCF\x41\x72\x14\x29\x4C\xF0"
+ "\x20\xD9\x9A\xC5\x66\xA4\x03\x76"
+ "\x5B\xA4\x15\x4F\x0E\x64\x39\x40"
+ "\x25\xF9\x20\x22\xF5\x88\xF5\xBA"
+ "\xE4\xDF\x45\x61\xBF\x8D\x7A\x24"
+ "\x4B\x92\x71\xD9\x2F\x77\xA7\x95"
+ "\xA8\x7F\x61\xD5\xA4\x57\xB0\xFB"
+ "\xB5\x77\xBA\x1C\xEE\x71\xFA\xB0"
+ "\x16\x4C\x18\x6B\xF2\x69\xA0\x07"
+ "\xEF\xBE\xEC\x69\xAC\xA8\x63\x9E",
+ .ilen = 504,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06",
+ .rlen = 504,
},
};
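
Note on reproducing these vectors: the templates above are all marked "Generated with Crypto++". The exact generator program is not part of this patch; as a rough, hedged illustration only, a standalone Crypto++ program along the following lines produces a ctr(blowfish) vector with the same key and IV as the bf_ctr templates above (the plaintext array is only partially filled in here as a placeholder -- append the rest of the .input buffer to obtain the full .result):

	/*
	 * Illustrative sketch only -- not the generator actually used for
	 * this patch.  Encrypts a buffer with Blowfish in CTR mode using
	 * the key/IV of the bf_ctr templates above and prints the result
	 * in testmgr.h "\xNN" style.
	 */
	#include <cstdio>
	#include <cryptopp/blowfish.h>
	#include <cryptopp/modes.h>

	int main()
	{
		const unsigned char key[32] = {
			0x85, 0x62, 0x3F, 0x1C, 0xF9, 0xD6, 0x1C, 0xF9,
			0xD6, 0xB3, 0x90, 0x6D, 0x4A, 0x90, 0x6D, 0x4A,
			0x27, 0x04, 0xE1, 0x27, 0x04, 0xE1, 0xBE, 0x9B,
			0x78, 0xBE, 0x9B, 0x78, 0x55, 0x32, 0x0F, 0x55,
		};
		/* 8-byte IV = initial counter block, as ctr(blowfish) expects */
		const unsigned char iv[8] = {
			0xE2, 0x24, 0x89, 0xEE, 0x53, 0xB8, 0x1D, 0x5F,
		};
		/* placeholder: first bytes of the 504-byte .input above */
		unsigned char pt[504] = {
			0x56, 0xED, 0x84, 0x1B, 0x8F, 0x26, 0xBD, 0x31,
		};
		unsigned char ct[504];

		CryptoPP::CTR_Mode<CryptoPP::Blowfish>::Encryption enc;
		enc.SetKeyWithIV(key, sizeof(key), iv);
		/* CTR acts as a stream cipher, so any length (e.g. 503) works */
		enc.ProcessData(ct, pt, sizeof(pt));

		for (size_t i = 0; i < sizeof(ct); i++)
			printf("\\x%02X%s", ct[i], (i % 8 == 7) ? "\n" : "");
		return 0;
	}

The Twofish CTR/CBC vectors below can be regenerated the same way by swapping in CryptoPP::Twofish and the corresponding 16-byte IV; the header paths and buffer fill shown here are assumptions, not taken from this patch.
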
/*
* Twofish test vectors.
*/
-#define TF_ENC_TEST_VECTORS 3
-#define TF_DEC_TEST_VECTORS 3
-#define TF_CBC_ENC_TEST_VECTORS 4
-#define TF_CBC_DEC_TEST_VECTORS 4
+#define TF_ENC_TEST_VECTORS 4
+#define TF_DEC_TEST_VECTORS 4
+#define TF_CBC_ENC_TEST_VECTORS 5
+#define TF_CBC_DEC_TEST_VECTORS 5
+#define TF_CTR_ENC_TEST_VECTORS 2
+#define TF_CTR_DEC_TEST_VECTORS 2
+#define TF_LRW_ENC_TEST_VECTORS 8
+#define TF_LRW_DEC_TEST_VECTORS 8
+#define TF_XTS_ENC_TEST_VECTORS 5
+#define TF_XTS_DEC_TEST_VECTORS 5
static struct cipher_testvec tf_enc_tv_template[] = {
{
@@ -2440,6 +6561,141 @@ static struct cipher_testvec tf_enc_tv_template[] = {
.result = "\x37\x52\x7b\xe0\x05\x23\x34\xb8"
"\x9f\x0c\xfc\xca\xe8\x7c\xfa\x20",
.rlen = 16,
+ }, { /* Generated with Crypto++ */
+ .key = "\x3F\x85\x62\x3F\x1C\xF9\xD6\x1C"
+ "\xF9\xD6\xB3\x90\x6D\x4A\x90\x6D"
+ "\x4A\x27\x04\xE1\x27\x04\xE1\xBE"
+ "\x9B\x78\xBE\x9B\x78\x55\x32\x0F",
+ .klen = 32,
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\x88\xCB\x1E\xC2\xAF\x8A\x97\xFF"
+ "\xF6\x90\x46\x9C\x4A\x0F\x08\xDC"
+ "\xDE\xAB\xAD\xFA\xFC\xA8\xC2\x3D"
+ "\xE0\xE4\x8B\x3F\xD5\xA3\xF7\x14"
+ "\x34\x9E\xB6\x08\xB2\xDD\xA8\xF5"
+ "\xDF\xFA\xC7\xE8\x09\x50\x76\x08"
+ "\xA2\xB6\x6A\x59\xC0\x2B\x6D\x05"
+ "\x89\xF6\x82\xF0\xD3\xDB\x06\x02"
+ "\xB5\x11\x5C\x5E\x79\x1A\xAC\x43"
+ "\x5C\xC0\x30\x4B\x6B\x16\xA1\x40"
+ "\x80\x27\x88\xBA\x2C\x74\x42\xE0"
+ "\x1B\xA5\x85\x08\xB9\xE6\x22\x7A"
+ "\x36\x3B\x0D\x9F\xA0\x22\x6C\x2A"
+ "\x91\x75\x47\xBC\x67\x21\x4E\xF9"
+ "\xEA\xFF\xD9\xD5\xC0\xFC\x9E\x2C"
+ "\x3E\xAD\xC6\x61\x0E\x93\x7A\x22"
+ "\x09\xC8\x8D\xC1\x8E\xB4\x8B\x5C"
+ "\xC6\x24\x42\xB8\x23\x66\x80\xA9"
+ "\x32\x0B\x7A\x29\xBF\xB3\x0B\x63"
+ "\x43\x27\x13\xA9\xBE\xEB\xBD\xF3"
+ "\x33\x62\x70\xE2\x1B\x86\x7A\xA1"
+ "\x51\x4A\x16\xFE\x29\x63\x7E\xD0"
+ "\x7A\xA4\x6E\x2C\xF8\xC1\xDB\xE8"
+ "\xCB\x4D\xD2\x8C\x04\x14\xB4\x66"
+ "\x41\xB7\x3A\x96\x16\x7C\x1D\x5B"
+ "\xB6\x41\x42\x64\x43\xEE\x6E\x7C"
+ "\x8B\xAF\x01\x9C\xA4\x6E\x75\x8F"
+ "\xDE\x10\x9F\xA6\xE7\xD6\x44\x97"
+ "\x66\xA3\x96\x0F\x1C\x25\x60\xF5"
+ "\x3C\x2E\x32\x69\x0E\x82\xFF\x27"
+ "\x0F\xB5\x06\xDA\xD8\x31\x15\x6C"
+ "\xDF\x18\x6C\x87\xF5\x3B\x11\x9A"
+ "\x1B\x42\x1F\x5B\x29\x19\x96\x13"
+ "\x68\x2E\x5E\x08\x1C\x8F\x32\x4B"
+ "\x81\x77\x6D\xF4\xA0\x01\x42\xEC"
+ "\xDD\x5B\xFD\x3A\x8E\x6A\x14\xFB"
+ "\x83\x54\xDF\x0F\x86\xB7\xEA\x40"
+ "\x46\x39\xF7\x2A\x89\x8D\x4E\x96"
+ "\x5F\x5F\x6D\x76\xC6\x13\x9D\x3D"
+ "\x1D\x5F\x0C\x7D\xE2\xBC\xC2\x16"
+ "\x16\xBE\x89\x3E\xB0\x61\xA2\x5D"
+ "\xAF\xD1\x40\x5F\x1A\xB8\x26\x41"
+ "\xC6\xBD\x36\xEF\xED\x29\x50\x6D"
+ "\x10\xEF\x26\xE8\xA8\x93\x11\x3F"
+ "\x2D\x1F\x88\x20\x77\x45\xF5\x66"
+ "\x08\xB9\xF1\xEF\xB1\x93\xA8\x81"
+ "\x65\xC5\xCD\x3E\x8C\x06\x60\x2C"
+ "\xB2\x10\x7A\xCA\x05\x25\x59\xDB"
+ "\xC7\x28\xF5\x20\x35\x52\x9E\x62"
+ "\xF8\x88\x24\x1C\x4D\x84\x12\x39"
+ "\x39\xE4\x2E\xF4\xD4\x9D\x2B\xBC"
+ "\x87\x66\xE6\xC0\x6B\x31\x9A\x66"
+ "\x03\xDC\x95\xD8\x6B\xD0\x30\x8F"
+ "\xDF\x8F\x8D\xFA\xEC\x1F\x08\xBD"
+ "\xA3\x63\xE2\x71\x4F\x03\x94\x87"
+ "\x50\xDF\x15\x1F\xED\x3A\xA3\x7F"
+ "\x1F\x2A\xB5\xA1\x69\xAC\x4B\x0D"
+ "\x84\x9B\x2A\xE9\x55\xDD\x46\x91"
+ "\x15\x33\xF3\x2B\x9B\x46\x97\x00"
+ "\xF0\x29\xD8\x59\x5D\x33\x37\xF9"
+ "\x58\x33\x9B\x78\xC7\x58\x48\x6B"
+ "\x2C\x75\x64\xC4\xCA\xC1\x7E\xD5",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -2473,6 +6729,141 @@ static struct cipher_testvec tf_dec_tv_template[] = {
.ilen = 16,
.result = zeroed_string,
.rlen = 16,
+ }, { /* Generated with Crypto++ */
+ .key = "\x3F\x85\x62\x3F\x1C\xF9\xD6\x1C"
+ "\xF9\xD6\xB3\x90\x6D\x4A\x90\x6D"
+ "\x4A\x27\x04\xE1\x27\x04\xE1\xBE"
+ "\x9B\x78\xBE\x9B\x78\x55\x32\x0F",
+ .klen = 32,
+ .input = "\x88\xCB\x1E\xC2\xAF\x8A\x97\xFF"
+ "\xF6\x90\x46\x9C\x4A\x0F\x08\xDC"
+ "\xDE\xAB\xAD\xFA\xFC\xA8\xC2\x3D"
+ "\xE0\xE4\x8B\x3F\xD5\xA3\xF7\x14"
+ "\x34\x9E\xB6\x08\xB2\xDD\xA8\xF5"
+ "\xDF\xFA\xC7\xE8\x09\x50\x76\x08"
+ "\xA2\xB6\x6A\x59\xC0\x2B\x6D\x05"
+ "\x89\xF6\x82\xF0\xD3\xDB\x06\x02"
+ "\xB5\x11\x5C\x5E\x79\x1A\xAC\x43"
+ "\x5C\xC0\x30\x4B\x6B\x16\xA1\x40"
+ "\x80\x27\x88\xBA\x2C\x74\x42\xE0"
+ "\x1B\xA5\x85\x08\xB9\xE6\x22\x7A"
+ "\x36\x3B\x0D\x9F\xA0\x22\x6C\x2A"
+ "\x91\x75\x47\xBC\x67\x21\x4E\xF9"
+ "\xEA\xFF\xD9\xD5\xC0\xFC\x9E\x2C"
+ "\x3E\xAD\xC6\x61\x0E\x93\x7A\x22"
+ "\x09\xC8\x8D\xC1\x8E\xB4\x8B\x5C"
+ "\xC6\x24\x42\xB8\x23\x66\x80\xA9"
+ "\x32\x0B\x7A\x29\xBF\xB3\x0B\x63"
+ "\x43\x27\x13\xA9\xBE\xEB\xBD\xF3"
+ "\x33\x62\x70\xE2\x1B\x86\x7A\xA1"
+ "\x51\x4A\x16\xFE\x29\x63\x7E\xD0"
+ "\x7A\xA4\x6E\x2C\xF8\xC1\xDB\xE8"
+ "\xCB\x4D\xD2\x8C\x04\x14\xB4\x66"
+ "\x41\xB7\x3A\x96\x16\x7C\x1D\x5B"
+ "\xB6\x41\x42\x64\x43\xEE\x6E\x7C"
+ "\x8B\xAF\x01\x9C\xA4\x6E\x75\x8F"
+ "\xDE\x10\x9F\xA6\xE7\xD6\x44\x97"
+ "\x66\xA3\x96\x0F\x1C\x25\x60\xF5"
+ "\x3C\x2E\x32\x69\x0E\x82\xFF\x27"
+ "\x0F\xB5\x06\xDA\xD8\x31\x15\x6C"
+ "\xDF\x18\x6C\x87\xF5\x3B\x11\x9A"
+ "\x1B\x42\x1F\x5B\x29\x19\x96\x13"
+ "\x68\x2E\x5E\x08\x1C\x8F\x32\x4B"
+ "\x81\x77\x6D\xF4\xA0\x01\x42\xEC"
+ "\xDD\x5B\xFD\x3A\x8E\x6A\x14\xFB"
+ "\x83\x54\xDF\x0F\x86\xB7\xEA\x40"
+ "\x46\x39\xF7\x2A\x89\x8D\x4E\x96"
+ "\x5F\x5F\x6D\x76\xC6\x13\x9D\x3D"
+ "\x1D\x5F\x0C\x7D\xE2\xBC\xC2\x16"
+ "\x16\xBE\x89\x3E\xB0\x61\xA2\x5D"
+ "\xAF\xD1\x40\x5F\x1A\xB8\x26\x41"
+ "\xC6\xBD\x36\xEF\xED\x29\x50\x6D"
+ "\x10\xEF\x26\xE8\xA8\x93\x11\x3F"
+ "\x2D\x1F\x88\x20\x77\x45\xF5\x66"
+ "\x08\xB9\xF1\xEF\xB1\x93\xA8\x81"
+ "\x65\xC5\xCD\x3E\x8C\x06\x60\x2C"
+ "\xB2\x10\x7A\xCA\x05\x25\x59\xDB"
+ "\xC7\x28\xF5\x20\x35\x52\x9E\x62"
+ "\xF8\x88\x24\x1C\x4D\x84\x12\x39"
+ "\x39\xE4\x2E\xF4\xD4\x9D\x2B\xBC"
+ "\x87\x66\xE6\xC0\x6B\x31\x9A\x66"
+ "\x03\xDC\x95\xD8\x6B\xD0\x30\x8F"
+ "\xDF\x8F\x8D\xFA\xEC\x1F\x08\xBD"
+ "\xA3\x63\xE2\x71\x4F\x03\x94\x87"
+ "\x50\xDF\x15\x1F\xED\x3A\xA3\x7F"
+ "\x1F\x2A\xB5\xA1\x69\xAC\x4B\x0D"
+ "\x84\x9B\x2A\xE9\x55\xDD\x46\x91"
+ "\x15\x33\xF3\x2B\x9B\x46\x97\x00"
+ "\xF0\x29\xD8\x59\x5D\x33\x37\xF9"
+ "\x58\x33\x9B\x78\xC7\x58\x48\x6B"
+ "\x2C\x75\x64\xC4\xCA\xC1\x7E\xD5",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -2519,6 +6910,143 @@ static struct cipher_testvec tf_cbc_enc_tv_template[] = {
"\x05\xef\x8c\x61\xa8\x11\x58\x26"
"\x34\xba\x5c\xb7\x10\x6a\xa6\x41",
.rlen = 48,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\xC8\xFF\xF2\x53\xA6\x27\x09\xD1"
+ "\x33\x38\xC2\xC0\x0C\x14\x7E\xB5"
+ "\x26\x1B\x05\x0C\x05\x12\x3F\xC0"
+ "\xF9\x1C\x02\x28\x40\x96\x6F\xD0"
+ "\x3D\x32\xDF\xDA\x56\x00\x6E\xEE"
+ "\x5B\x2A\x72\x9D\xC2\x4D\x19\xBC"
+ "\x8C\x53\xFA\x87\x6F\xDD\x81\xA3"
+ "\xB1\xD3\x44\x65\xDF\xE7\x63\x38"
+ "\x4A\xFC\xDC\xEC\x3F\x26\x8E\xB8"
+ "\x43\xFC\xFE\x18\xB5\x11\x6D\x31"
+ "\x81\x8B\x0D\x75\xF6\x80\xEC\x84"
+ "\x04\xB9\xE6\x09\x63\xED\x39\xDB"
+ "\xC3\xF6\x14\xD6\x6E\x5E\x8B\xBD"
+ "\x3E\xFA\xD7\x98\x50\x6F\xD9\x63"
+ "\x02\xCD\x0D\x39\x4B\x0D\xEC\x80"
+ "\xE3\x6A\x17\xF4\xCC\xAD\xFF\x68"
+ "\x45\xDD\xC8\x83\x1D\x41\x96\x0D"
+ "\x91\x2E\x05\xD3\x59\x82\xE0\x43"
+ "\x90\x4F\xB9\xF7\xAD\x6B\x2E\xAF"
+ "\xA7\x84\x00\x53\xCD\x6F\xD1\x0C"
+ "\x4E\xF9\x5A\x23\xFB\xCA\xC7\xD3"
+ "\xA9\xAA\x9D\xB2\x3F\x66\xF1\xAC"
+ "\x25\x21\x8F\xF7\xEF\xF2\x6A\xDF"
+ "\xE8\xDA\x75\x1A\x8A\xF1\xDD\x38"
+ "\x1F\xF9\x3D\x68\x4A\xBB\x9E\x34"
+ "\x1F\x66\x1F\x9C\x2B\x54\xFF\x60"
+ "\x7F\x29\x4B\x55\x80\x8F\x4E\xA7"
+ "\xA6\x9A\x0A\xD9\x0D\x19\x00\xF8"
+ "\x1F\xBC\x0C\x40\x6B\xEC\x99\x25"
+ "\x94\x70\x74\x0E\x1D\xC5\xBC\x12"
+ "\xF3\x42\xBE\x95\xBF\xFB\x4E\x55"
+ "\x9A\xB9\xCE\x14\x16\x5B\xDC\xD3"
+ "\x75\x42\x62\x04\x31\x1F\x95\x7C"
+ "\x66\x1A\x97\xDC\x2F\x40\x5C\x39"
+ "\x78\xE6\x02\xDB\x49\xE1\xC6\x47"
+ "\xC2\x78\x9A\xBB\xF3\xBE\xCB\x93"
+ "\xD8\xB8\xE8\xBB\x8C\xB3\x9B\xA7"
+ "\xC2\x89\xF3\x91\x88\x83\x3D\xF0"
+ "\x29\xA2\xCD\xB5\x79\x16\xC2\x40"
+ "\x11\x03\x8E\x9C\xFD\xC9\x43\xC4"
+ "\xC2\x19\xF0\x4A\x32\xEF\x0C\x2B"
+ "\xD3\x2B\xE9\xD4\x4C\xDE\x95\xCF"
+ "\x04\x03\xD3\x2C\x7F\x82\xC8\xFA"
+ "\x0F\xD8\x7A\x39\x7B\x01\x41\x9C"
+ "\x78\xB6\xC9\xBF\xF9\x78\x57\x88"
+ "\xB1\xA5\xE1\xE0\xD9\x16\xD4\xC8"
+ "\xEE\xC4\xBE\x7B\x55\x59\x00\x48"
+ "\x1B\xBC\x14\xFA\x2A\x9D\xC9\x1C"
+ "\xFB\x28\x3F\x95\xDD\xB7\xD6\xCE"
+ "\x3A\x7F\x09\x0C\x0E\x69\x30\x7D"
+ "\xBC\x68\x9C\x91\x2A\x59\x57\x04"
+ "\xED\x1A\x1E\x00\xB1\x85\x92\x04"
+ "\x28\x8C\x0C\x3C\xC1\xD5\x12\xF7"
+ "\x4C\x3E\xB0\xE7\x86\x62\x68\x91"
+ "\xFC\xC4\xE2\xCE\xA6\xDC\x5E\x93"
+ "\x5D\x8D\x8C\x68\xB3\xB2\xB9\x64"
+ "\x16\xB8\xC8\x6F\xD8\xEE\x21\xBD"
+ "\xAC\x18\x0C\x7D\x0D\x05\xAB\xF1"
+ "\xFA\xDD\xE2\x48\xDF\x4C\x02\x39"
+ "\x69\xA1\x62\xBD\x49\x3A\x9D\x91"
+ "\x30\x70\x56\xA4\x37\xDD\x7C\xC0"
+ "\x0A\xA3\x30\x10\x26\x25\x41\x2C",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -2565,6 +7093,2155 @@ static struct cipher_testvec tf_cbc_dec_tv_template[] = {
.ilen = 48,
.result = zeroed_string,
.rlen = 48,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\xC8\xFF\xF2\x53\xA6\x27\x09\xD1"
+ "\x33\x38\xC2\xC0\x0C\x14\x7E\xB5"
+ "\x26\x1B\x05\x0C\x05\x12\x3F\xC0"
+ "\xF9\x1C\x02\x28\x40\x96\x6F\xD0"
+ "\x3D\x32\xDF\xDA\x56\x00\x6E\xEE"
+ "\x5B\x2A\x72\x9D\xC2\x4D\x19\xBC"
+ "\x8C\x53\xFA\x87\x6F\xDD\x81\xA3"
+ "\xB1\xD3\x44\x65\xDF\xE7\x63\x38"
+ "\x4A\xFC\xDC\xEC\x3F\x26\x8E\xB8"
+ "\x43\xFC\xFE\x18\xB5\x11\x6D\x31"
+ "\x81\x8B\x0D\x75\xF6\x80\xEC\x84"
+ "\x04\xB9\xE6\x09\x63\xED\x39\xDB"
+ "\xC3\xF6\x14\xD6\x6E\x5E\x8B\xBD"
+ "\x3E\xFA\xD7\x98\x50\x6F\xD9\x63"
+ "\x02\xCD\x0D\x39\x4B\x0D\xEC\x80"
+ "\xE3\x6A\x17\xF4\xCC\xAD\xFF\x68"
+ "\x45\xDD\xC8\x83\x1D\x41\x96\x0D"
+ "\x91\x2E\x05\xD3\x59\x82\xE0\x43"
+ "\x90\x4F\xB9\xF7\xAD\x6B\x2E\xAF"
+ "\xA7\x84\x00\x53\xCD\x6F\xD1\x0C"
+ "\x4E\xF9\x5A\x23\xFB\xCA\xC7\xD3"
+ "\xA9\xAA\x9D\xB2\x3F\x66\xF1\xAC"
+ "\x25\x21\x8F\xF7\xEF\xF2\x6A\xDF"
+ "\xE8\xDA\x75\x1A\x8A\xF1\xDD\x38"
+ "\x1F\xF9\x3D\x68\x4A\xBB\x9E\x34"
+ "\x1F\x66\x1F\x9C\x2B\x54\xFF\x60"
+ "\x7F\x29\x4B\x55\x80\x8F\x4E\xA7"
+ "\xA6\x9A\x0A\xD9\x0D\x19\x00\xF8"
+ "\x1F\xBC\x0C\x40\x6B\xEC\x99\x25"
+ "\x94\x70\x74\x0E\x1D\xC5\xBC\x12"
+ "\xF3\x42\xBE\x95\xBF\xFB\x4E\x55"
+ "\x9A\xB9\xCE\x14\x16\x5B\xDC\xD3"
+ "\x75\x42\x62\x04\x31\x1F\x95\x7C"
+ "\x66\x1A\x97\xDC\x2F\x40\x5C\x39"
+ "\x78\xE6\x02\xDB\x49\xE1\xC6\x47"
+ "\xC2\x78\x9A\xBB\xF3\xBE\xCB\x93"
+ "\xD8\xB8\xE8\xBB\x8C\xB3\x9B\xA7"
+ "\xC2\x89\xF3\x91\x88\x83\x3D\xF0"
+ "\x29\xA2\xCD\xB5\x79\x16\xC2\x40"
+ "\x11\x03\x8E\x9C\xFD\xC9\x43\xC4"
+ "\xC2\x19\xF0\x4A\x32\xEF\x0C\x2B"
+ "\xD3\x2B\xE9\xD4\x4C\xDE\x95\xCF"
+ "\x04\x03\xD3\x2C\x7F\x82\xC8\xFA"
+ "\x0F\xD8\x7A\x39\x7B\x01\x41\x9C"
+ "\x78\xB6\xC9\xBF\xF9\x78\x57\x88"
+ "\xB1\xA5\xE1\xE0\xD9\x16\xD4\xC8"
+ "\xEE\xC4\xBE\x7B\x55\x59\x00\x48"
+ "\x1B\xBC\x14\xFA\x2A\x9D\xC9\x1C"
+ "\xFB\x28\x3F\x95\xDD\xB7\xD6\xCE"
+ "\x3A\x7F\x09\x0C\x0E\x69\x30\x7D"
+ "\xBC\x68\x9C\x91\x2A\x59\x57\x04"
+ "\xED\x1A\x1E\x00\xB1\x85\x92\x04"
+ "\x28\x8C\x0C\x3C\xC1\xD5\x12\xF7"
+ "\x4C\x3E\xB0\xE7\x86\x62\x68\x91"
+ "\xFC\xC4\xE2\xCE\xA6\xDC\x5E\x93"
+ "\x5D\x8D\x8C\x68\xB3\xB2\xB9\x64"
+ "\x16\xB8\xC8\x6F\xD8\xEE\x21\xBD"
+ "\xAC\x18\x0C\x7D\x0D\x05\xAB\xF1"
+ "\xFA\xDD\xE2\x48\xDF\x4C\x02\x39"
+ "\x69\xA1\x62\xBD\x49\x3A\x9D\x91"
+ "\x30\x70\x56\xA4\x37\xDD\x7C\xC0"
+ "\x0A\xA3\x30\x10\x26\x25\x41\x2C",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec tf_ctr_enc_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\xDF\xDD\x69\xFA\xB0\x2E\xFD\xFE"
+ "\x70\x9E\xC5\x4B\xC9\xD4\xA1\x30"
+ "\x26\x9B\x89\xA1\xEE\x43\xE0\x52"
+ "\x55\x17\x4E\xC7\x0E\x33\x1F\xF1"
+ "\x9F\x8D\x40\x9F\x24\xFD\x92\xA0"
+ "\xBC\x8F\x35\xDD\x67\x38\xD8\xAA"
+ "\xCF\xF8\x48\xCA\xFB\xE4\x5C\x60"
+ "\x01\x41\x21\x12\x38\xAB\x52\x4F"
+ "\xA8\x57\x20\xE0\x21\x6A\x17\x0D"
+ "\x0E\xF9\x8E\x49\x42\x00\x3C\x94"
+ "\x14\xC0\xD0\x8D\x8A\x98\xEB\x29"
+ "\xEC\xAE\x96\x44\xC0\x3C\x48\xDC"
+ "\x29\x35\x25\x2F\xE7\x11\x6C\x68"
+ "\xC8\x67\x0A\x2F\xF4\x07\xBE\xF9"
+ "\x2C\x31\x87\x40\xAB\xB2\xB6\xFA"
+ "\xD2\xC9\x6D\x5C\x50\xE9\xE6\x7E"
+ "\xE3\x0A\xD2\xD5\x6D\x8D\x64\x9E"
+ "\x70\xCE\x03\x76\xDD\xE0\xF0\x8C"
+ "\x84\x86\x8B\x6A\xFE\xC7\xF9\x69"
+ "\x2E\xFE\xFC\xC2\xC4\x1A\x55\x58"
+ "\xB3\xBE\xE2\x7E\xED\x39\x42\x6C"
+ "\xB4\x42\x97\x9A\xEC\xE1\x0A\x06"
+ "\x02\xC5\x03\x9D\xC4\x48\x15\x66"
+ "\x35\x6A\xC2\xC9\xA2\x26\x30\xBB"
+ "\xDB\x2D\xC8\x08\x2B\xA0\x29\x1A"
+ "\x23\x61\x48\xEA\x80\x04\x27\xAA"
+ "\x69\x49\xE8\xE8\x4A\x83\x6B\x5A"
+ "\xCA\x7C\xD3\xB1\xB5\x0B\xCC\x23"
+ "\x74\x1F\xA9\x87\xCD\xED\xC0\x2D"
+ "\xBF\xEB\xCF\x16\x2D\x2A\x2E\x1D"
+ "\x96\xBA\x36\x11\x45\x41\xDA\xCE"
+ "\xA4\x48\x80\x8B\x06\xF4\x98\x89"
+ "\x8B\x23\x08\x53\xF4\xD4\x5A\x24"
+ "\x8B\xF8\x43\x73\xD1\xEE\xC4\xB0"
+ "\xF8\xFE\x09\x0C\x75\x05\x38\x0B"
+ "\x7C\x81\xDE\x9D\xE4\x61\x37\x63"
+ "\x63\xAD\x12\xD2\x04\xB9\xCE\x45"
+ "\x5A\x1A\x6E\xB3\x78\x2A\xA4\x74"
+ "\x86\xD0\xE3\xFF\xDA\x38\x9C\xB5"
+ "\xB8\xB1\xDB\x38\x2F\xC5\x6A\xB4"
+ "\xEB\x6E\x96\xE8\x43\x80\xB5\x51"
+ "\x61\x2D\x48\xAA\x07\x65\x11\x8C"
+ "\x48\xE3\x90\x7E\x78\x3A\xEC\x97"
+ "\x05\x3D\x84\xE7\x90\x2B\xAA\xBD"
+ "\x83\x29\x0E\x1A\x81\x73\x7B\xE0"
+ "\x7A\x01\x4A\x37\x3B\x77\x7F\x8D"
+ "\x49\xA4\x2F\x6E\xBE\x68\x99\x08"
+ "\x99\xAA\x4C\x12\x04\xAE\x1F\x77"
+ "\x35\x88\xF1\x65\x06\x0A\x0B\x4D"
+ "\x47\xF9\x50\x38\x5D\x71\xF9\x6E"
+ "\xDE\xEC\x61\x35\x2C\x4C\x96\x50"
+ "\xE8\x28\x93\x9C\x7E\x01\xC6\x04"
+ "\xB2\xD6\xBC\x6C\x17\xEB\xC1\x7D"
+ "\x11\xE9\x43\x83\x76\xAA\x53\x37"
+ "\x0C\x1D\x39\x89\x53\x72\x09\x7E"
+ "\xD9\x85\x16\x04\xA5\x2C\x05\x6F"
+ "\x17\x0C\x6E\x66\xAA\x84\xA7\xD9"
+ "\xE2\xD9\xC4\xEB\x43\x3E\xB1\x8D"
+ "\x7C\x36\xC7\x71\x70\x9C\x10\xD8"
+ "\xE8\x47\x2A\x4D\xFD\xA1\xBC\xE3"
+ "\xB9\x32\xE2\xC1\x82\xAC\xFE\xCC"
+ "\xC5\xC9\x7F\x9E\xCF\x33\x7A\xDF",
+ .rlen = 496,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
+ "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\xEB\x44\xAF\x49\x27\xB8\xFB\x44"
+ "\x4C\xA6\xC3\x0C\x8B\xD0\x01\x0C"
+ "\x53\xC8\x16\x38\xDE\x40\x4F\x91"
+ "\x25\x6D\x4C\xA0\x9A\x87\x1E\xDA"
+ "\x88\x7E\x89\xE9\x67\x2B\x83\xA2"
+ "\x5F\x2E\x23\x3E\x45\xB9\x77\x7B"
+ "\xA6\x7E\x47\x36\x81\x9F\x9B\xF3"
+ "\xE0\xF0\xD7\x47\xA9\xC8\xEF\x33"
+ "\x0C\x43\xFE\x67\x50\x0A\x2C\x3E"
+ "\xA0\xE1\x25\x8E\x80\x07\x4A\xC0"
+ "\x64\x89\x9F\x6A\x27\x96\x07\xA6"
+ "\x9B\xC8\x1B\x21\x60\xAE\x5D\x01"
+ "\xE2\xCD\xC8\xAA\x6C\x9D\x1C\x34"
+ "\x39\x18\x09\xA4\x82\x59\x78\xE7"
+ "\xFC\x59\x65\xF2\x94\xFF\xFB\xE2"
+ "\x3C\xDA\xB1\x90\x95\xBF\x91\xE3"
+ "\xE6\x87\x31\x9E\x16\x85\xAD\xB1"
+ "\x4C\xAE\x43\x4D\x19\x58\xB5\x5E"
+ "\x2E\xF5\x09\xAA\x39\xF4\xC0\xB3"
+ "\xD4\x4D\xDB\x73\x7A\xD4\xF1\xBF"
+ "\x89\x16\x4D\x2D\xA2\x26\x33\x72"
+ "\x18\x33\x7E\xD6\xD2\x16\xA4\x54"
+ "\xF4\x8C\xB3\x52\xDF\x21\x9C\xEB"
+ "\xBF\x49\xD3\xF9\x05\x06\xCB\xD2"
+ "\xA9\xD2\x3B\x6E\x19\x8C\xBC\x19"
+ "\xAB\x89\xD6\xD8\xCD\x56\x89\x5E"
+ "\xAC\x00\xE3\x50\x63\x4A\x80\x9A"
+ "\x05\xBC\x50\x39\xD3\x32\xD9\x0D"
+ "\xE3\x20\x0D\x75\x54\xEC\xE6\x31"
+ "\x14\xB9\x3A\x59\x00\x43\x37\x8E"
+ "\x8C\x5A\x79\x62\x14\x76\x8A\xAE"
+ "\x8F\xCC\xA1\x6C\x38\x78\xDD\x2D"
+ "\x8B\x6D\xEA\xBD\x7B\x25\xFF\x60"
+ "\xC9\x87\xB1\x79\x1E\xA5\x86\x68"
+ "\x81\xB4\xE2\xC1\x05\x7D\x3A\x73"
+ "\xD0\xDA\x75\x77\x9E\x05\x27\xF1"
+ "\x08\xA9\x66\x64\x6C\xBC\x82\x17"
+ "\x2C\x23\x5F\x62\x4D\x02\x1A\x58"
+ "\xE7\xB7\x23\x6D\xE2\x20\xDA\xEF"
+ "\xB4\xB3\x3F\xB2\x2B\x69\x98\x83"
+ "\x95\x87\x13\x57\x60\xD7\xB5\xB1"
+ "\xEE\x0A\x2F\x95\x36\x4C\x76\x5D"
+ "\x5F\xD9\x19\xED\xB9\xA5\x48\xBF"
+ "\xC8\xAB\x0F\x71\xCC\x61\x8E\x0A"
+ "\xD0\x29\x44\xA8\xB9\xC1\xE8\xC8"
+ "\xC9\xA8\x28\x81\xFB\x50\xF2\xF0"
+ "\x26\xAE\x39\xB8\x91\xCD\xA8\xAC"
+ "\xDE\x55\x1B\x50\x14\x53\x44\x17"
+ "\x54\x46\xFC\xB1\xE4\x07\x6B\x9A"
+ "\x01\x14\xF0\x2E\x2E\xDB\x46\x1B"
+ "\x1A\x09\x97\xA9\xB6\x97\x79\x06"
+ "\xFB\xCB\x85\xCF\xDD\xA1\x41\xB1"
+ "\x00\xAA\xF7\xE0\x89\x73\xFB\xE5"
+ "\xBF\x84\xDB\xC9\xCD\xC4\xA2\x0D"
+ "\x3B\xAC\xF9\xDF\x96\xBF\x88\x23"
+ "\x41\x67\xA1\x24\x99\x7E\xCC\x9B"
+ "\x02\x8F\x6A\x49\xF6\x25\xBA\x7A"
+ "\xF4\x78\xFD\x79\x62\x63\x4F\x14"
+ "\xD6\x11\x11\x04\x05\x5F\x7E\xEA"
+ "\x4C\xB6\xF8\xF4\x5F\x48\x52\x54"
+ "\x94\x63\xA8\x4E\xCF\xD2\x1B\x1B"
+ "\x22\x18\x6A\xAF\x6E\x3E\xE1\x0D",
+ .rlen = 496,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59",
+ .ilen = 499,
+ .result = "\xDF\xDD\x69\xFA\xB0\x2E\xFD\xFE"
+ "\x70\x9E\xC5\x4B\xC9\xD4\xA1\x30"
+ "\x26\x9B\x89\xA1\xEE\x43\xE0\x52"
+ "\x55\x17\x4E\xC7\x0E\x33\x1F\xF1"
+ "\x9F\x8D\x40\x9F\x24\xFD\x92\xA0"
+ "\xBC\x8F\x35\xDD\x67\x38\xD8\xAA"
+ "\xCF\xF8\x48\xCA\xFB\xE4\x5C\x60"
+ "\x01\x41\x21\x12\x38\xAB\x52\x4F"
+ "\xA8\x57\x20\xE0\x21\x6A\x17\x0D"
+ "\x0E\xF9\x8E\x49\x42\x00\x3C\x94"
+ "\x14\xC0\xD0\x8D\x8A\x98\xEB\x29"
+ "\xEC\xAE\x96\x44\xC0\x3C\x48\xDC"
+ "\x29\x35\x25\x2F\xE7\x11\x6C\x68"
+ "\xC8\x67\x0A\x2F\xF4\x07\xBE\xF9"
+ "\x2C\x31\x87\x40\xAB\xB2\xB6\xFA"
+ "\xD2\xC9\x6D\x5C\x50\xE9\xE6\x7E"
+ "\xE3\x0A\xD2\xD5\x6D\x8D\x64\x9E"
+ "\x70\xCE\x03\x76\xDD\xE0\xF0\x8C"
+ "\x84\x86\x8B\x6A\xFE\xC7\xF9\x69"
+ "\x2E\xFE\xFC\xC2\xC4\x1A\x55\x58"
+ "\xB3\xBE\xE2\x7E\xED\x39\x42\x6C"
+ "\xB4\x42\x97\x9A\xEC\xE1\x0A\x06"
+ "\x02\xC5\x03\x9D\xC4\x48\x15\x66"
+ "\x35\x6A\xC2\xC9\xA2\x26\x30\xBB"
+ "\xDB\x2D\xC8\x08\x2B\xA0\x29\x1A"
+ "\x23\x61\x48\xEA\x80\x04\x27\xAA"
+ "\x69\x49\xE8\xE8\x4A\x83\x6B\x5A"
+ "\xCA\x7C\xD3\xB1\xB5\x0B\xCC\x23"
+ "\x74\x1F\xA9\x87\xCD\xED\xC0\x2D"
+ "\xBF\xEB\xCF\x16\x2D\x2A\x2E\x1D"
+ "\x96\xBA\x36\x11\x45\x41\xDA\xCE"
+ "\xA4\x48\x80\x8B\x06\xF4\x98\x89"
+ "\x8B\x23\x08\x53\xF4\xD4\x5A\x24"
+ "\x8B\xF8\x43\x73\xD1\xEE\xC4\xB0"
+ "\xF8\xFE\x09\x0C\x75\x05\x38\x0B"
+ "\x7C\x81\xDE\x9D\xE4\x61\x37\x63"
+ "\x63\xAD\x12\xD2\x04\xB9\xCE\x45"
+ "\x5A\x1A\x6E\xB3\x78\x2A\xA4\x74"
+ "\x86\xD0\xE3\xFF\xDA\x38\x9C\xB5"
+ "\xB8\xB1\xDB\x38\x2F\xC5\x6A\xB4"
+ "\xEB\x6E\x96\xE8\x43\x80\xB5\x51"
+ "\x61\x2D\x48\xAA\x07\x65\x11\x8C"
+ "\x48\xE3\x90\x7E\x78\x3A\xEC\x97"
+ "\x05\x3D\x84\xE7\x90\x2B\xAA\xBD"
+ "\x83\x29\x0E\x1A\x81\x73\x7B\xE0"
+ "\x7A\x01\x4A\x37\x3B\x77\x7F\x8D"
+ "\x49\xA4\x2F\x6E\xBE\x68\x99\x08"
+ "\x99\xAA\x4C\x12\x04\xAE\x1F\x77"
+ "\x35\x88\xF1\x65\x06\x0A\x0B\x4D"
+ "\x47\xF9\x50\x38\x5D\x71\xF9\x6E"
+ "\xDE\xEC\x61\x35\x2C\x4C\x96\x50"
+ "\xE8\x28\x93\x9C\x7E\x01\xC6\x04"
+ "\xB2\xD6\xBC\x6C\x17\xEB\xC1\x7D"
+ "\x11\xE9\x43\x83\x76\xAA\x53\x37"
+ "\x0C\x1D\x39\x89\x53\x72\x09\x7E"
+ "\xD9\x85\x16\x04\xA5\x2C\x05\x6F"
+ "\x17\x0C\x6E\x66\xAA\x84\xA7\xD9"
+ "\xE2\xD9\xC4\xEB\x43\x3E\xB1\x8D"
+ "\x7C\x36\xC7\x71\x70\x9C\x10\xD8"
+ "\xE8\x47\x2A\x4D\xFD\xA1\xBC\xE3"
+ "\xB9\x32\xE2\xC1\x82\xAC\xFE\xCC"
+ "\xC5\xC9\x7F\x9E\xCF\x33\x7A\xDF"
+ "\x6C\x82\x9D",
+ .rlen = 499,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 499 - 16, 16 },
+ },
+};
+
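The CTR templates above and below are annotated "Generated with Crypto++". As a minimal sketch (not part of the patch), the following standalone C++ program shows how the final encryption entry above could be cross-checked against that library: it reuses that entry's key, IV and first 16 plaintext bytes, and since CTR is a stream mode the printed 16-byte prefix should match the first 16 bytes of the entry's .result (DF DD 69 FA B0 2E FD FE 70 9E C5 4B C9 D4 A1 30) if the vector really was produced with Twofish-CTR. CTR_Mode<Twofish>, SetKeyWithIV and the pipelining classes are standard Crypto++; the file name and build command are illustrative only.

// ctr_twofish_check.cpp -- hypothetical helper; build e.g. with: g++ ctr_twofish_check.cpp -lcryptopp
#include <cryptopp/twofish.h>
#include <cryptopp/modes.h>
#include <cryptopp/filters.h>
#include <cryptopp/hex.h>
#include <iostream>
#include <string>

int main()
{
	using namespace CryptoPP;

	/* Key and IV copied from the 499-byte CTR encryption vector above. */
	const byte key[32] = {
		0x85, 0x62, 0x3F, 0x1C, 0xF9, 0xD6, 0x1C, 0xF9,
		0xD6, 0xB3, 0x90, 0x6D, 0x4A, 0x90, 0x6D, 0x4A,
		0x27, 0x04, 0xE1, 0x27, 0x04, 0xE1, 0xBE, 0x9B,
		0x78, 0xBE, 0x9B, 0x78, 0x55, 0x32, 0x0F, 0x55
	};
	const byte iv[16] = {
		0xE2, 0x24, 0x89, 0xEE, 0x53, 0xB8, 0x1D, 0x5F,
		0xC4, 0x29, 0x8E, 0xF3, 0x35, 0x9A, 0xFF, 0x64
	};
	/* First 16 of the 499 plaintext bytes (.input of that vector). */
	const byte plain[16] = {
		0x56, 0xED, 0x84, 0x1B, 0x8F, 0x26, 0xBD, 0x31,
		0xC8, 0x5F, 0xF6, 0x6A, 0x01, 0x98, 0x0C, 0xA3
	};

	/* Twofish in counter mode, keyed exactly as the kernel vector is. */
	CTR_Mode<Twofish>::Encryption enc;
	enc.SetKeyWithIV(key, sizeof(key), iv);

	/* Encrypt the 16-byte prefix and hex-encode the result. */
	std::string hex;
	StringSource ss(plain, sizeof(plain), true,
			new StreamTransformationFilter(enc,
				new HexEncoder(new StringSink(hex))));

	/* Expected to print DFDD69FAB02EFDFE709EC54BC9D4A130 if the vector
	 * was generated as the comment states. */
	std::cout << hex << std::endl;
	return 0;
}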
+static struct cipher_testvec tf_ctr_dec_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\xDF\xDD\x69\xFA\xB0\x2E\xFD\xFE"
+ "\x70\x9E\xC5\x4B\xC9\xD4\xA1\x30"
+ "\x26\x9B\x89\xA1\xEE\x43\xE0\x52"
+ "\x55\x17\x4E\xC7\x0E\x33\x1F\xF1"
+ "\x9F\x8D\x40\x9F\x24\xFD\x92\xA0"
+ "\xBC\x8F\x35\xDD\x67\x38\xD8\xAA"
+ "\xCF\xF8\x48\xCA\xFB\xE4\x5C\x60"
+ "\x01\x41\x21\x12\x38\xAB\x52\x4F"
+ "\xA8\x57\x20\xE0\x21\x6A\x17\x0D"
+ "\x0E\xF9\x8E\x49\x42\x00\x3C\x94"
+ "\x14\xC0\xD0\x8D\x8A\x98\xEB\x29"
+ "\xEC\xAE\x96\x44\xC0\x3C\x48\xDC"
+ "\x29\x35\x25\x2F\xE7\x11\x6C\x68"
+ "\xC8\x67\x0A\x2F\xF4\x07\xBE\xF9"
+ "\x2C\x31\x87\x40\xAB\xB2\xB6\xFA"
+ "\xD2\xC9\x6D\x5C\x50\xE9\xE6\x7E"
+ "\xE3\x0A\xD2\xD5\x6D\x8D\x64\x9E"
+ "\x70\xCE\x03\x76\xDD\xE0\xF0\x8C"
+ "\x84\x86\x8B\x6A\xFE\xC7\xF9\x69"
+ "\x2E\xFE\xFC\xC2\xC4\x1A\x55\x58"
+ "\xB3\xBE\xE2\x7E\xED\x39\x42\x6C"
+ "\xB4\x42\x97\x9A\xEC\xE1\x0A\x06"
+ "\x02\xC5\x03\x9D\xC4\x48\x15\x66"
+ "\x35\x6A\xC2\xC9\xA2\x26\x30\xBB"
+ "\xDB\x2D\xC8\x08\x2B\xA0\x29\x1A"
+ "\x23\x61\x48\xEA\x80\x04\x27\xAA"
+ "\x69\x49\xE8\xE8\x4A\x83\x6B\x5A"
+ "\xCA\x7C\xD3\xB1\xB5\x0B\xCC\x23"
+ "\x74\x1F\xA9\x87\xCD\xED\xC0\x2D"
+ "\xBF\xEB\xCF\x16\x2D\x2A\x2E\x1D"
+ "\x96\xBA\x36\x11\x45\x41\xDA\xCE"
+ "\xA4\x48\x80\x8B\x06\xF4\x98\x89"
+ "\x8B\x23\x08\x53\xF4\xD4\x5A\x24"
+ "\x8B\xF8\x43\x73\xD1\xEE\xC4\xB0"
+ "\xF8\xFE\x09\x0C\x75\x05\x38\x0B"
+ "\x7C\x81\xDE\x9D\xE4\x61\x37\x63"
+ "\x63\xAD\x12\xD2\x04\xB9\xCE\x45"
+ "\x5A\x1A\x6E\xB3\x78\x2A\xA4\x74"
+ "\x86\xD0\xE3\xFF\xDA\x38\x9C\xB5"
+ "\xB8\xB1\xDB\x38\x2F\xC5\x6A\xB4"
+ "\xEB\x6E\x96\xE8\x43\x80\xB5\x51"
+ "\x61\x2D\x48\xAA\x07\x65\x11\x8C"
+ "\x48\xE3\x90\x7E\x78\x3A\xEC\x97"
+ "\x05\x3D\x84\xE7\x90\x2B\xAA\xBD"
+ "\x83\x29\x0E\x1A\x81\x73\x7B\xE0"
+ "\x7A\x01\x4A\x37\x3B\x77\x7F\x8D"
+ "\x49\xA4\x2F\x6E\xBE\x68\x99\x08"
+ "\x99\xAA\x4C\x12\x04\xAE\x1F\x77"
+ "\x35\x88\xF1\x65\x06\x0A\x0B\x4D"
+ "\x47\xF9\x50\x38\x5D\x71\xF9\x6E"
+ "\xDE\xEC\x61\x35\x2C\x4C\x96\x50"
+ "\xE8\x28\x93\x9C\x7E\x01\xC6\x04"
+ "\xB2\xD6\xBC\x6C\x17\xEB\xC1\x7D"
+ "\x11\xE9\x43\x83\x76\xAA\x53\x37"
+ "\x0C\x1D\x39\x89\x53\x72\x09\x7E"
+ "\xD9\x85\x16\x04\xA5\x2C\x05\x6F"
+ "\x17\x0C\x6E\x66\xAA\x84\xA7\xD9"
+ "\xE2\xD9\xC4\xEB\x43\x3E\xB1\x8D"
+ "\x7C\x36\xC7\x71\x70\x9C\x10\xD8"
+ "\xE8\x47\x2A\x4D\xFD\xA1\xBC\xE3"
+ "\xB9\x32\xE2\xC1\x82\xAC\xFE\xCC"
+ "\xC5\xC9\x7F\x9E\xCF\x33\x7A\xDF",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
+ "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\xEB\x44\xAF\x49\x27\xB8\xFB\x44"
+ "\x4C\xA6\xC3\x0C\x8B\xD0\x01\x0C"
+ "\x53\xC8\x16\x38\xDE\x40\x4F\x91"
+ "\x25\x6D\x4C\xA0\x9A\x87\x1E\xDA"
+ "\x88\x7E\x89\xE9\x67\x2B\x83\xA2"
+ "\x5F\x2E\x23\x3E\x45\xB9\x77\x7B"
+ "\xA6\x7E\x47\x36\x81\x9F\x9B\xF3"
+ "\xE0\xF0\xD7\x47\xA9\xC8\xEF\x33"
+ "\x0C\x43\xFE\x67\x50\x0A\x2C\x3E"
+ "\xA0\xE1\x25\x8E\x80\x07\x4A\xC0"
+ "\x64\x89\x9F\x6A\x27\x96\x07\xA6"
+ "\x9B\xC8\x1B\x21\x60\xAE\x5D\x01"
+ "\xE2\xCD\xC8\xAA\x6C\x9D\x1C\x34"
+ "\x39\x18\x09\xA4\x82\x59\x78\xE7"
+ "\xFC\x59\x65\xF2\x94\xFF\xFB\xE2"
+ "\x3C\xDA\xB1\x90\x95\xBF\x91\xE3"
+ "\xE6\x87\x31\x9E\x16\x85\xAD\xB1"
+ "\x4C\xAE\x43\x4D\x19\x58\xB5\x5E"
+ "\x2E\xF5\x09\xAA\x39\xF4\xC0\xB3"
+ "\xD4\x4D\xDB\x73\x7A\xD4\xF1\xBF"
+ "\x89\x16\x4D\x2D\xA2\x26\x33\x72"
+ "\x18\x33\x7E\xD6\xD2\x16\xA4\x54"
+ "\xF4\x8C\xB3\x52\xDF\x21\x9C\xEB"
+ "\xBF\x49\xD3\xF9\x05\x06\xCB\xD2"
+ "\xA9\xD2\x3B\x6E\x19\x8C\xBC\x19"
+ "\xAB\x89\xD6\xD8\xCD\x56\x89\x5E"
+ "\xAC\x00\xE3\x50\x63\x4A\x80\x9A"
+ "\x05\xBC\x50\x39\xD3\x32\xD9\x0D"
+ "\xE3\x20\x0D\x75\x54\xEC\xE6\x31"
+ "\x14\xB9\x3A\x59\x00\x43\x37\x8E"
+ "\x8C\x5A\x79\x62\x14\x76\x8A\xAE"
+ "\x8F\xCC\xA1\x6C\x38\x78\xDD\x2D"
+ "\x8B\x6D\xEA\xBD\x7B\x25\xFF\x60"
+ "\xC9\x87\xB1\x79\x1E\xA5\x86\x68"
+ "\x81\xB4\xE2\xC1\x05\x7D\x3A\x73"
+ "\xD0\xDA\x75\x77\x9E\x05\x27\xF1"
+ "\x08\xA9\x66\x64\x6C\xBC\x82\x17"
+ "\x2C\x23\x5F\x62\x4D\x02\x1A\x58"
+ "\xE7\xB7\x23\x6D\xE2\x20\xDA\xEF"
+ "\xB4\xB3\x3F\xB2\x2B\x69\x98\x83"
+ "\x95\x87\x13\x57\x60\xD7\xB5\xB1"
+ "\xEE\x0A\x2F\x95\x36\x4C\x76\x5D"
+ "\x5F\xD9\x19\xED\xB9\xA5\x48\xBF"
+ "\xC8\xAB\x0F\x71\xCC\x61\x8E\x0A"
+ "\xD0\x29\x44\xA8\xB9\xC1\xE8\xC8"
+ "\xC9\xA8\x28\x81\xFB\x50\xF2\xF0"
+ "\x26\xAE\x39\xB8\x91\xCD\xA8\xAC"
+ "\xDE\x55\x1B\x50\x14\x53\x44\x17"
+ "\x54\x46\xFC\xB1\xE4\x07\x6B\x9A"
+ "\x01\x14\xF0\x2E\x2E\xDB\x46\x1B"
+ "\x1A\x09\x97\xA9\xB6\x97\x79\x06"
+ "\xFB\xCB\x85\xCF\xDD\xA1\x41\xB1"
+ "\x00\xAA\xF7\xE0\x89\x73\xFB\xE5"
+ "\xBF\x84\xDB\xC9\xCD\xC4\xA2\x0D"
+ "\x3B\xAC\xF9\xDF\x96\xBF\x88\x23"
+ "\x41\x67\xA1\x24\x99\x7E\xCC\x9B"
+ "\x02\x8F\x6A\x49\xF6\x25\xBA\x7A"
+ "\xF4\x78\xFD\x79\x62\x63\x4F\x14"
+ "\xD6\x11\x11\x04\x05\x5F\x7E\xEA"
+ "\x4C\xB6\xF8\xF4\x5F\x48\x52\x54"
+ "\x94\x63\xA8\x4E\xCF\xD2\x1B\x1B"
+ "\x22\x18\x6A\xAF\x6E\x3E\xE1\x0D",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\xDF\xDD\x69\xFA\xB0\x2E\xFD\xFE"
+ "\x70\x9E\xC5\x4B\xC9\xD4\xA1\x30"
+ "\x26\x9B\x89\xA1\xEE\x43\xE0\x52"
+ "\x55\x17\x4E\xC7\x0E\x33\x1F\xF1"
+ "\x9F\x8D\x40\x9F\x24\xFD\x92\xA0"
+ "\xBC\x8F\x35\xDD\x67\x38\xD8\xAA"
+ "\xCF\xF8\x48\xCA\xFB\xE4\x5C\x60"
+ "\x01\x41\x21\x12\x38\xAB\x52\x4F"
+ "\xA8\x57\x20\xE0\x21\x6A\x17\x0D"
+ "\x0E\xF9\x8E\x49\x42\x00\x3C\x94"
+ "\x14\xC0\xD0\x8D\x8A\x98\xEB\x29"
+ "\xEC\xAE\x96\x44\xC0\x3C\x48\xDC"
+ "\x29\x35\x25\x2F\xE7\x11\x6C\x68"
+ "\xC8\x67\x0A\x2F\xF4\x07\xBE\xF9"
+ "\x2C\x31\x87\x40\xAB\xB2\xB6\xFA"
+ "\xD2\xC9\x6D\x5C\x50\xE9\xE6\x7E"
+ "\xE3\x0A\xD2\xD5\x6D\x8D\x64\x9E"
+ "\x70\xCE\x03\x76\xDD\xE0\xF0\x8C"
+ "\x84\x86\x8B\x6A\xFE\xC7\xF9\x69"
+ "\x2E\xFE\xFC\xC2\xC4\x1A\x55\x58"
+ "\xB3\xBE\xE2\x7E\xED\x39\x42\x6C"
+ "\xB4\x42\x97\x9A\xEC\xE1\x0A\x06"
+ "\x02\xC5\x03\x9D\xC4\x48\x15\x66"
+ "\x35\x6A\xC2\xC9\xA2\x26\x30\xBB"
+ "\xDB\x2D\xC8\x08\x2B\xA0\x29\x1A"
+ "\x23\x61\x48\xEA\x80\x04\x27\xAA"
+ "\x69\x49\xE8\xE8\x4A\x83\x6B\x5A"
+ "\xCA\x7C\xD3\xB1\xB5\x0B\xCC\x23"
+ "\x74\x1F\xA9\x87\xCD\xED\xC0\x2D"
+ "\xBF\xEB\xCF\x16\x2D\x2A\x2E\x1D"
+ "\x96\xBA\x36\x11\x45\x41\xDA\xCE"
+ "\xA4\x48\x80\x8B\x06\xF4\x98\x89"
+ "\x8B\x23\x08\x53\xF4\xD4\x5A\x24"
+ "\x8B\xF8\x43\x73\xD1\xEE\xC4\xB0"
+ "\xF8\xFE\x09\x0C\x75\x05\x38\x0B"
+ "\x7C\x81\xDE\x9D\xE4\x61\x37\x63"
+ "\x63\xAD\x12\xD2\x04\xB9\xCE\x45"
+ "\x5A\x1A\x6E\xB3\x78\x2A\xA4\x74"
+ "\x86\xD0\xE3\xFF\xDA\x38\x9C\xB5"
+ "\xB8\xB1\xDB\x38\x2F\xC5\x6A\xB4"
+ "\xEB\x6E\x96\xE8\x43\x80\xB5\x51"
+ "\x61\x2D\x48\xAA\x07\x65\x11\x8C"
+ "\x48\xE3\x90\x7E\x78\x3A\xEC\x97"
+ "\x05\x3D\x84\xE7\x90\x2B\xAA\xBD"
+ "\x83\x29\x0E\x1A\x81\x73\x7B\xE0"
+ "\x7A\x01\x4A\x37\x3B\x77\x7F\x8D"
+ "\x49\xA4\x2F\x6E\xBE\x68\x99\x08"
+ "\x99\xAA\x4C\x12\x04\xAE\x1F\x77"
+ "\x35\x88\xF1\x65\x06\x0A\x0B\x4D"
+ "\x47\xF9\x50\x38\x5D\x71\xF9\x6E"
+ "\xDE\xEC\x61\x35\x2C\x4C\x96\x50"
+ "\xE8\x28\x93\x9C\x7E\x01\xC6\x04"
+ "\xB2\xD6\xBC\x6C\x17\xEB\xC1\x7D"
+ "\x11\xE9\x43\x83\x76\xAA\x53\x37"
+ "\x0C\x1D\x39\x89\x53\x72\x09\x7E"
+ "\xD9\x85\x16\x04\xA5\x2C\x05\x6F"
+ "\x17\x0C\x6E\x66\xAA\x84\xA7\xD9"
+ "\xE2\xD9\xC4\xEB\x43\x3E\xB1\x8D"
+ "\x7C\x36\xC7\x71\x70\x9C\x10\xD8"
+ "\xE8\x47\x2A\x4D\xFD\xA1\xBC\xE3"
+ "\xB9\x32\xE2\xC1\x82\xAC\xFE\xCC"
+ "\xC5\xC9\x7F\x9E\xCF\x33\x7A\xDF"
+ "\x6C\x82\x9D",
+ .ilen = 499,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59",
+ .rlen = 499,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 499 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec tf_lrw_enc_tv_template[] = {
+ /* Generated from AES-LRW test vectors */
+ {
+ .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d"
+ "\x4c\x26\x84\x14\xb5\x68\x01\x85"
+ "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03"
+ "\xee\x5a\x83\x0c\xcc\x09\x4c\x87",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\xa1\x6c\x50\x69\x26\xa4\xef\x7b"
+ "\x7c\xc6\x91\xeb\x72\xdd\x9b\xee",
+ .rlen = 16,
+ }, {
+ .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c"
+ "\xd7\x79\xe8\x0f\x54\x88\x79\x44"
+ "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea"
+ "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x02",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\xab\x72\x0a\xad\x3b\x0c\xf0\xc9"
+ "\x42\x2f\xf1\xae\xf1\x3c\xb1\xbd",
+ .rlen = 16,
+ }, {
+ .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50"
+ "\x30\xfe\x69\xe2\x37\x7f\x98\x47"
+ "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6"
+ "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x85\xa7\x56\x67\x08\xfa\x42\xe1"
+ "\x22\xe6\x82\xfc\xd9\xb4\xd7\xd4",
+ .rlen = 16,
+ }, {
+ .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15"
+ "\x25\x83\xf7\x3c\x1f\x01\x28\x74"
+ "\xca\xc6\xbc\x35\x4d\x4a\x65\x54"
+ "\x90\xae\x61\xcf\x7b\xae\xbd\xcc"
+ "\xad\xe4\x94\xc5\x4a\x29\xae\x70",
+ .klen = 40,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\xd2\xaf\x69\x35\x24\x1d\x0e\x1c"
+ "\x84\x8b\x05\xe4\xa2\x2f\x16\xf5",
+ .rlen = 16,
+ }, {
+ .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff"
+ "\xf8\x86\xce\xac\x93\xc5\xad\xc6"
+ "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd"
+ "\x52\x13\xb2\xb7\xf0\xff\x11\xd8"
+ "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f",
+ .klen = 40,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x4a\x23\x56\xd7\xff\x90\xd0\x9a"
+ "\x0d\x7c\x26\xfc\xf0\xf0\xf6\xe4",
+ .rlen = 16,
+ }, {
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x30\xaf\x26\x05\x9d\x5d\x0a\x58"
+ "\xe2\xe7\xce\x8a\xb2\x56\x6d\x76",
+ .rlen = 16,
+ }, {
+ .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d"
+ "\xd4\x70\x98\x0b\xc7\x95\x84\xc8"
+ "\xb2\xfb\x64\xce\x60\x97\x87\x8d"
+ "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7"
+ "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4"
+ "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\xdf\xcf\xdc\xd2\xe1\xcf\x86\x75"
+ "\x17\x66\x5e\x0c\x14\xa1\x3d\x40",
+ .rlen = 16,
+ }, {
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x05\x11\xb7\x18\xab\xc6\x2d\xac"
+ "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c"
+ "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8"
+ "\x50\x38\x1f\x71\x49\xb6\x57\xd6"
+ "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90"
+ "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6"
+ "\xad\x1e\x9e\x20\x5f\x38\xbe\x04"
+ "\xda\x10\x8e\xed\xa2\xa4\x87\xab"
+ "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c"
+ "\xc9\xac\x42\x31\x95\x7c\xc9\x04"
+ "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6"
+ "\x15\xd7\x3f\x4f\x2f\x66\x69\x03"
+ "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65"
+ "\x4c\x96\x12\xed\x7c\x92\x03\x01"
+ "\x6f\xbc\x35\x93\xac\xf1\x27\xf1"
+ "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50"
+ "\x89\xa4\x8e\x66\x44\x85\xcc\xfd"
+ "\x33\x14\x70\xe3\x96\xb2\xc3\xd3"
+ "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5"
+ "\x2d\x64\x75\xdd\xb4\x54\xe6\x74"
+ "\x8c\xd3\x9d\x9e\x86\xab\x51\x53"
+ "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40"
+ "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5"
+ "\x76\x12\x73\x44\x1a\x56\xd7\x72"
+ "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda"
+ "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd"
+ "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60"
+ "\x1a\xe2\x70\x85\x58\xc2\x1b\x09"
+ "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9"
+ "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8"
+ "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8"
+ "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10"
+ "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1"
+ "\x90\x3e\x76\x4a\x74\xa4\x21\x2c"
+ "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e"
+ "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f"
+ "\x8d\x23\x31\x74\x84\xeb\x88\x6e"
+ "\xcc\xb9\xbc\x22\x83\x19\x07\x22"
+ "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78"
+ "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5"
+ "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41"
+ "\x3c\xce\x8f\x42\x60\x71\xa7\x75"
+ "\x08\x40\x65\x8a\x82\xbf\xf5\x43"
+ "\x71\x96\xa9\x4d\x44\x8a\x20\xbe"
+ "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65"
+ "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9"
+ "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4"
+ "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a"
+ "\x62\x73\x65\xfd\x46\x63\x25\x3d"
+ "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf"
+ "\x24\xf3\xb4\xac\x64\xba\xdf\x4b"
+ "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7"
+ "\xc5\x68\x77\x84\x32\x2b\xcc\x85"
+ "\x74\x96\xf0\x12\x77\x61\xb9\xeb"
+ "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8"
+ "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24"
+ "\xda\x39\x87\x45\xc0\x2b\xbb\x01"
+ "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce"
+ "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6"
+ "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32"
+ "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45"
+ "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6"
+ "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4"
+ "\x21\xc4\xc2\x75\x67\x89\x37\x0a",
+ .ilen = 512,
+ .result = "\x30\x38\xeb\xaf\x12\x43\x1a\x89"
+ "\x62\xa2\x36\xe5\xcf\x77\x1e\xd9"
+ "\x08\xc3\x0d\xdd\x95\xab\x19\x96"
+ "\x27\x52\x41\xc3\xca\xfb\xf6\xee"
+ "\x40\x2d\xdf\xdd\x00\x0c\xb9\x0a"
+ "\x3a\xf0\xc0\xd1\xda\x63\x9e\x45"
+ "\x42\xe9\x29\xc0\xb4\x07\xb4\x31"
+ "\x66\x77\x72\xb5\xb6\xb3\x57\x46"
+ "\x34\x9a\xfe\x03\xaf\x6b\x36\x07"
+ "\x63\x8e\xc2\x5d\xa6\x0f\xb6\x7d"
+ "\xfb\x6d\x82\x51\xb6\x98\xd0\x71"
+ "\xe7\x10\x7a\xdf\xb2\xbd\xf1\x1d"
+ "\x72\x2b\x54\x13\xe3\x6d\x79\x37"
+ "\xa9\x39\x2c\xdf\x21\xab\x87\xd5"
+ "\xee\xef\x9a\x12\x50\x39\x2e\x1b"
+ "\x7d\xe6\x6a\x27\x48\xb9\xe7\xac"
+ "\xaa\xcd\x79\x5f\xf2\xf3\xa0\x08"
+ "\x6f\x2c\xf4\x0e\xd1\xb8\x89\x25"
+ "\x31\x9d\xef\xb1\x1d\x27\x55\x04"
+ "\xc9\x8c\xb7\x68\xdc\xb6\x67\x8a"
+ "\xdb\xcf\x22\xf2\x3b\x6f\xce\xbb"
+ "\x26\xbe\x4f\x27\x04\x42\xd1\x44"
+ "\x4c\x08\xa3\x95\x4c\x7f\x1a\xaf"
+ "\x1d\x28\x14\xfd\xb1\x1a\x34\x18"
+ "\xf5\x1e\x28\x69\x95\x6a\x5a\xba"
+ "\x8e\xb2\x58\x1d\x28\x17\x13\x3d"
+ "\x38\x7d\x14\x8d\xab\x5d\xf9\xe8"
+ "\x3c\x0f\x2b\x0d\x2b\x08\xb4\x4b"
+ "\x6b\x0d\xc8\xa7\x84\xc2\x3a\x1a"
+ "\xb7\xbd\xda\x92\x29\xb8\x5b\x5a"
+ "\x63\xa5\x99\x82\x09\x72\x8f\xc6"
+ "\xa4\x62\x24\x69\x8c\x2d\x26\x00"
+ "\x99\x83\x91\xd6\xc6\xcf\x57\x67"
+ "\x38\xea\xf2\xfc\x29\xe0\x73\x39"
+ "\xf9\x13\x94\x6d\xe2\x58\x28\x75"
+ "\x3e\xae\x71\x90\x07\x70\x1c\x38"
+ "\x5b\x4c\x1e\xb5\xa5\x3b\x20\xef"
+ "\xb1\x4c\x3e\x1a\x72\x62\xbb\x22"
+ "\x82\x09\xe3\x18\x3f\x4f\x48\xfc"
+ "\xdd\xac\xfc\xb6\x09\xdb\xd2\x7b"
+ "\xd6\xb7\x7e\x41\x2f\x14\xf5\x0e"
+ "\xc3\xac\x4a\xed\xe7\x82\xef\x31"
+ "\x1f\x1a\x51\x1e\x29\x60\xc8\x98"
+ "\x93\x51\x1d\x3d\x62\x59\x83\x82"
+ "\x0c\xf1\xd7\x8d\xac\x33\x44\x81"
+ "\x3c\x59\xb7\xd4\x5b\x65\x82\xc4"
+ "\xec\xdc\x24\xfd\x0e\x1a\x79\x94"
+ "\x34\xb0\x62\xfa\x98\x49\x26\x1f"
+ "\xf4\x9e\x40\x44\x5b\x1f\xf8\xbe"
+ "\x36\xff\xc6\xc6\x9d\xf2\xd6\xcc"
+ "\x63\x93\x29\xb9\x0b\x6d\xd7\x6c"
+ "\xdb\xf6\x21\x80\xf7\x5a\x37\x15"
+ "\x0c\xe3\x36\xc8\x74\x75\x20\x91"
+ "\xdf\x52\x2d\x0c\xe7\x45\xff\x46"
+ "\xb3\xf4\xec\xc2\xbd\xd3\x37\xb6"
+ "\x26\xa2\x5d\x7d\x61\xbf\x10\x46"
+ "\x57\x8d\x05\x96\x70\x0b\xd6\x41"
+ "\x5c\xe9\xd3\x54\x81\x39\x3a\xdd"
+ "\x5f\x92\x81\x6e\x35\x03\xd4\x72"
+ "\x3d\x5a\xe7\xb9\x3b\x0c\x84\x23"
+ "\x45\x5d\xec\x72\xc1\x52\xef\x2e"
+ "\x81\x00\xd3\xfe\x4c\x3c\x05\x61"
+ "\x80\x18\xc4\x6c\x03\xd3\xb7\xba"
+ "\x11\xd7\xb8\x6e\xea\xe1\x80\x30",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
+
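The LRW templates above take their keys, block indices (.iv) and plaintexts from the AES-LRW test vectors, with the .result bytes recomputed for Twofish. As a reference summary of the standard LRW construction (not text from the patch): the trailing 16 bytes of .key are the tweak key K_2 and the remainder is the Twofish key K_1, which is consistent with the .klen values of 32, 40 and 48 seen here; the .iv field carries the 128-bit block index i, and each 16-byte block is processed as

    T_i = K_2 \otimes i                         (\otimes = multiplication in GF(2^{128}))
    C_i = E_{K_1}(P_i \oplus T_i) \oplus T_i

Decryption uses D_{K_1} in place of E_{K_1} with the same tweak, which is why the decryption template that follows is simply the encryption template with .input and .result swapped.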
+static struct cipher_testvec tf_lrw_dec_tv_template[] = {
+ /* Generated from AES-LRW test vectors */
+ /* same as enc vectors with input and result reversed */
+ {
+ .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d"
+ "\x4c\x26\x84\x14\xb5\x68\x01\x85"
+ "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03"
+ "\xee\x5a\x83\x0c\xcc\x09\x4c\x87",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\xa1\x6c\x50\x69\x26\xa4\xef\x7b"
+ "\x7c\xc6\x91\xeb\x72\xdd\x9b\xee",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c"
+ "\xd7\x79\xe8\x0f\x54\x88\x79\x44"
+ "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea"
+ "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x02",
+ .input = "\xab\x72\x0a\xad\x3b\x0c\xf0\xc9"
+ "\x42\x2f\xf1\xae\xf1\x3c\xb1\xbd",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50"
+ "\x30\xfe\x69\xe2\x37\x7f\x98\x47"
+ "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6"
+ "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x85\xa7\x56\x67\x08\xfa\x42\xe1"
+ "\x22\xe6\x82\xfc\xd9\xb4\xd7\xd4",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15"
+ "\x25\x83\xf7\x3c\x1f\x01\x28\x74"
+ "\xca\xc6\xbc\x35\x4d\x4a\x65\x54"
+ "\x90\xae\x61\xcf\x7b\xae\xbd\xcc"
+ "\xad\xe4\x94\xc5\x4a\x29\xae\x70",
+ .klen = 40,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\xd2\xaf\x69\x35\x24\x1d\x0e\x1c"
+ "\x84\x8b\x05\xe4\xa2\x2f\x16\xf5",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff"
+ "\xf8\x86\xce\xac\x93\xc5\xad\xc6"
+ "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd"
+ "\x52\x13\xb2\xb7\xf0\xff\x11\xd8"
+ "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f",
+ .klen = 40,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x4a\x23\x56\xd7\xff\x90\xd0\x9a"
+ "\x0d\x7c\x26\xfc\xf0\xf0\xf6\xe4",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x30\xaf\x26\x05\x9d\x5d\x0a\x58"
+ "\xe2\xe7\xce\x8a\xb2\x56\x6d\x76",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d"
+ "\xd4\x70\x98\x0b\xc7\x95\x84\xc8"
+ "\xb2\xfb\x64\xce\x60\x97\x87\x8d"
+ "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7"
+ "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4"
+ "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\xdf\xcf\xdc\xd2\xe1\xcf\x86\x75"
+ "\x17\x66\x5e\x0c\x14\xa1\x3d\x40",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x30\x38\xeb\xaf\x12\x43\x1a\x89"
+ "\x62\xa2\x36\xe5\xcf\x77\x1e\xd9"
+ "\x08\xc3\x0d\xdd\x95\xab\x19\x96"
+ "\x27\x52\x41\xc3\xca\xfb\xf6\xee"
+ "\x40\x2d\xdf\xdd\x00\x0c\xb9\x0a"
+ "\x3a\xf0\xc0\xd1\xda\x63\x9e\x45"
+ "\x42\xe9\x29\xc0\xb4\x07\xb4\x31"
+ "\x66\x77\x72\xb5\xb6\xb3\x57\x46"
+ "\x34\x9a\xfe\x03\xaf\x6b\x36\x07"
+ "\x63\x8e\xc2\x5d\xa6\x0f\xb6\x7d"
+ "\xfb\x6d\x82\x51\xb6\x98\xd0\x71"
+ "\xe7\x10\x7a\xdf\xb2\xbd\xf1\x1d"
+ "\x72\x2b\x54\x13\xe3\x6d\x79\x37"
+ "\xa9\x39\x2c\xdf\x21\xab\x87\xd5"
+ "\xee\xef\x9a\x12\x50\x39\x2e\x1b"
+ "\x7d\xe6\x6a\x27\x48\xb9\xe7\xac"
+ "\xaa\xcd\x79\x5f\xf2\xf3\xa0\x08"
+ "\x6f\x2c\xf4\x0e\xd1\xb8\x89\x25"
+ "\x31\x9d\xef\xb1\x1d\x27\x55\x04"
+ "\xc9\x8c\xb7\x68\xdc\xb6\x67\x8a"
+ "\xdb\xcf\x22\xf2\x3b\x6f\xce\xbb"
+ "\x26\xbe\x4f\x27\x04\x42\xd1\x44"
+ "\x4c\x08\xa3\x95\x4c\x7f\x1a\xaf"
+ "\x1d\x28\x14\xfd\xb1\x1a\x34\x18"
+ "\xf5\x1e\x28\x69\x95\x6a\x5a\xba"
+ "\x8e\xb2\x58\x1d\x28\x17\x13\x3d"
+ "\x38\x7d\x14\x8d\xab\x5d\xf9\xe8"
+ "\x3c\x0f\x2b\x0d\x2b\x08\xb4\x4b"
+ "\x6b\x0d\xc8\xa7\x84\xc2\x3a\x1a"
+ "\xb7\xbd\xda\x92\x29\xb8\x5b\x5a"
+ "\x63\xa5\x99\x82\x09\x72\x8f\xc6"
+ "\xa4\x62\x24\x69\x8c\x2d\x26\x00"
+ "\x99\x83\x91\xd6\xc6\xcf\x57\x67"
+ "\x38\xea\xf2\xfc\x29\xe0\x73\x39"
+ "\xf9\x13\x94\x6d\xe2\x58\x28\x75"
+ "\x3e\xae\x71\x90\x07\x70\x1c\x38"
+ "\x5b\x4c\x1e\xb5\xa5\x3b\x20\xef"
+ "\xb1\x4c\x3e\x1a\x72\x62\xbb\x22"
+ "\x82\x09\xe3\x18\x3f\x4f\x48\xfc"
+ "\xdd\xac\xfc\xb6\x09\xdb\xd2\x7b"
+ "\xd6\xb7\x7e\x41\x2f\x14\xf5\x0e"
+ "\xc3\xac\x4a\xed\xe7\x82\xef\x31"
+ "\x1f\x1a\x51\x1e\x29\x60\xc8\x98"
+ "\x93\x51\x1d\x3d\x62\x59\x83\x82"
+ "\x0c\xf1\xd7\x8d\xac\x33\x44\x81"
+ "\x3c\x59\xb7\xd4\x5b\x65\x82\xc4"
+ "\xec\xdc\x24\xfd\x0e\x1a\x79\x94"
+ "\x34\xb0\x62\xfa\x98\x49\x26\x1f"
+ "\xf4\x9e\x40\x44\x5b\x1f\xf8\xbe"
+ "\x36\xff\xc6\xc6\x9d\xf2\xd6\xcc"
+ "\x63\x93\x29\xb9\x0b\x6d\xd7\x6c"
+ "\xdb\xf6\x21\x80\xf7\x5a\x37\x15"
+ "\x0c\xe3\x36\xc8\x74\x75\x20\x91"
+ "\xdf\x52\x2d\x0c\xe7\x45\xff\x46"
+ "\xb3\xf4\xec\xc2\xbd\xd3\x37\xb6"
+ "\x26\xa2\x5d\x7d\x61\xbf\x10\x46"
+ "\x57\x8d\x05\x96\x70\x0b\xd6\x41"
+ "\x5c\xe9\xd3\x54\x81\x39\x3a\xdd"
+ "\x5f\x92\x81\x6e\x35\x03\xd4\x72"
+ "\x3d\x5a\xe7\xb9\x3b\x0c\x84\x23"
+ "\x45\x5d\xec\x72\xc1\x52\xef\x2e"
+ "\x81\x00\xd3\xfe\x4c\x3c\x05\x61"
+ "\x80\x18\xc4\x6c\x03\xd3\xb7\xba"
+ "\x11\xd7\xb8\x6e\xea\xe1\x80\x30",
+ .ilen = 512,
+ .result = "\x05\x11\xb7\x18\xab\xc6\x2d\xac"
+ "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c"
+ "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8"
+ "\x50\x38\x1f\x71\x49\xb6\x57\xd6"
+ "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90"
+ "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6"
+ "\xad\x1e\x9e\x20\x5f\x38\xbe\x04"
+ "\xda\x10\x8e\xed\xa2\xa4\x87\xab"
+ "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c"
+ "\xc9\xac\x42\x31\x95\x7c\xc9\x04"
+ "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6"
+ "\x15\xd7\x3f\x4f\x2f\x66\x69\x03"
+ "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65"
+ "\x4c\x96\x12\xed\x7c\x92\x03\x01"
+ "\x6f\xbc\x35\x93\xac\xf1\x27\xf1"
+ "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50"
+ "\x89\xa4\x8e\x66\x44\x85\xcc\xfd"
+ "\x33\x14\x70\xe3\x96\xb2\xc3\xd3"
+ "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5"
+ "\x2d\x64\x75\xdd\xb4\x54\xe6\x74"
+ "\x8c\xd3\x9d\x9e\x86\xab\x51\x53"
+ "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40"
+ "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5"
+ "\x76\x12\x73\x44\x1a\x56\xd7\x72"
+ "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda"
+ "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd"
+ "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60"
+ "\x1a\xe2\x70\x85\x58\xc2\x1b\x09"
+ "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9"
+ "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8"
+ "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8"
+ "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10"
+ "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1"
+ "\x90\x3e\x76\x4a\x74\xa4\x21\x2c"
+ "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e"
+ "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f"
+ "\x8d\x23\x31\x74\x84\xeb\x88\x6e"
+ "\xcc\xb9\xbc\x22\x83\x19\x07\x22"
+ "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78"
+ "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5"
+ "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41"
+ "\x3c\xce\x8f\x42\x60\x71\xa7\x75"
+ "\x08\x40\x65\x8a\x82\xbf\xf5\x43"
+ "\x71\x96\xa9\x4d\x44\x8a\x20\xbe"
+ "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65"
+ "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9"
+ "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4"
+ "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a"
+ "\x62\x73\x65\xfd\x46\x63\x25\x3d"
+ "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf"
+ "\x24\xf3\xb4\xac\x64\xba\xdf\x4b"
+ "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7"
+ "\xc5\x68\x77\x84\x32\x2b\xcc\x85"
+ "\x74\x96\xf0\x12\x77\x61\xb9\xeb"
+ "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8"
+ "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24"
+ "\xda\x39\x87\x45\xc0\x2b\xbb\x01"
+ "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce"
+ "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6"
+ "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32"
+ "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45"
+ "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6"
+ "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4"
+ "\x21\xc4\xc2\x75\x67\x89\x37\x0a",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec tf_xts_enc_tv_template[] = {
+ /* Generated from AES-XTS test vectors */
+	{
+ .key = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .ilen = 32,
+ .result = "\x4b\xc9\x44\x4a\x11\xa3\xef\xac"
+ "\x30\x74\xe4\x44\x52\x77\x97\x43"
+ "\xa7\x60\xb2\x45\x2e\xf9\x00\x90"
+ "\x9f\xaa\xfd\x89\x6e\x9d\x4a\xe0",
+ .rlen = 32,
+ }, {
+ .key = "\x11\x11\x11\x11\x11\x11\x11\x11"
+ "\x11\x11\x11\x11\x11\x11\x11\x11"
+ "\x22\x22\x22\x22\x22\x22\x22\x22"
+ "\x22\x22\x22\x22\x22\x22\x22\x22",
+ .klen = 32,
+ .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44",
+ .ilen = 32,
+ .result = "\x57\x0e\x8f\xe5\x2a\x35\x61\x4f"
+ "\x32\xd3\xbd\x36\x05\x15\x44\x2c"
+ "\x58\x06\xf7\xf8\x00\xa8\xb6\xd5"
+ "\xc6\x28\x92\xdb\xd8\x34\xa2\xe9",
+ .rlen = 32,
+ }, {
+ .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8"
+ "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0"
+ "\x22\x22\x22\x22\x22\x22\x22\x22"
+ "\x22\x22\x22\x22\x22\x22\x22\x22",
+ .klen = 32,
+ .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44",
+ .ilen = 32,
+ .result = "\x96\x45\x8f\x8d\x7a\x75\xb1\xde"
+ "\x40\x0c\x89\x56\xf6\x4d\xa7\x07"
+ "\x38\xbb\x5b\xe9\xcd\x84\xae\xb2"
+ "\x7b\x6a\x62\xf4\x8c\xb5\x37\xea",
+ .rlen = 32,
+ }, {
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .ilen = 512,
+ .result = "\xa9\x78\xae\x1e\xea\xa2\x44\x4c"
+ "\xa2\x7a\x64\x1f\xaf\x46\xc1\xe0"
+ "\x6c\xb2\xf3\x92\x9a\xd6\x7d\x58"
+ "\xb8\x2d\xb9\x5d\x58\x07\x66\x50"
+ "\xea\x35\x35\x8c\xb2\x46\x61\x06"
+ "\x5d\x65\xfc\x57\x8f\x69\x74\xab"
+ "\x8a\x06\x69\xb5\x6c\xda\x66\xc7"
+ "\x52\x90\xbb\x8e\x6d\x8b\xb5\xa2"
+ "\x78\x1d\xc2\xa9\xc2\x73\x00\xc3"
+ "\x32\x36\x7c\x97\x6b\x4e\x8a\x50"
+ "\xe4\x91\x83\x96\x8f\xf4\x94\x1a"
+ "\xa6\x27\xe1\x33\xcb\x91\xc6\x5f"
+ "\x94\x75\xbc\xd7\x3e\x3e\x6f\x9e"
+ "\xa9\x31\x80\x5e\xe5\xdb\xc8\x53"
+ "\x01\x73\x68\x32\x25\x19\xfa\xfb"
+ "\xe4\xcf\xb9\x3e\xa2\xa0\x8f\x31"
+ "\xbf\x54\x06\x93\xa8\xb1\x0f\xb6"
+ "\x7c\x3c\xde\x6f\x0f\xfb\x0c\x11"
+ "\x39\x80\x39\x09\x97\x65\xf2\x83"
+ "\xae\xe6\xa1\x6f\x47\xb8\x49\xde"
+ "\x99\x36\x20\x7d\x97\x3b\xec\xfa"
+ "\xb4\x33\x6e\x7a\xc7\x46\x84\x49"
+ "\x91\xcd\xe1\x57\x0d\xed\x40\x08"
+ "\x13\xf1\x4e\x3e\xa4\xa4\x5c\xe6"
+ "\xd2\x0c\x20\x8f\x3e\xdf\x3f\x47"
+ "\x9a\x2f\xde\x6d\x66\xc9\x99\x4a"
+ "\x2d\x9e\x9d\x4b\x1a\x27\xa2\x12"
+ "\x99\xf0\xf8\xb1\xb6\xf6\x57\xc3"
+ "\xca\x1c\xa3\x8e\xed\x39\x28\xb5"
+ "\x10\x1b\x4b\x08\x42\x00\x4a\xd3"
+ "\xad\x5a\xc6\x8e\xc8\xbb\x95\xc4"
+ "\x4b\xaa\xfe\xd5\x42\xa8\xa3\x6d"
+ "\x3c\xf3\x34\x91\x2d\xb4\xdd\x20"
+ "\x0c\x90\x6d\xa3\x9b\x66\x9d\x24"
+ "\x02\xa6\xa9\x3f\x3f\x58\x5d\x47"
+ "\x24\x65\x63\x7e\xbd\x8c\xe6\x52"
+ "\x7d\xef\x33\x53\x63\xec\xaa\x0b"
+ "\x64\x15\xa9\xa6\x1f\x10\x00\x38"
+ "\x35\xa8\xe7\xbe\x23\x70\x22\xe0"
+ "\xd3\xb9\xe6\xfd\xe6\xaa\x03\x50"
+ "\xf3\x3c\x27\x36\x8b\xcc\xfe\x9c"
+ "\x9c\xa3\xb3\xe7\x68\x9b\xa2\x71"
+ "\xe0\x07\xd9\x1f\x68\x1f\xac\x5e"
+ "\x7a\x74\x85\xa9\x6a\x90\xab\x2c"
+ "\x38\x51\xbc\x1f\x43\x4a\x56\x1c"
+ "\xf8\x47\x03\x4e\x67\xa8\x1f\x99"
+ "\x04\x39\x73\x32\xb2\x86\x79\xe7"
+ "\x14\x28\x70\xb8\xe2\x7d\x69\x85"
+ "\xb6\x0f\xc5\xd0\xd0\x01\x5c\xe6"
+ "\x09\x0f\x75\xf7\xb6\x81\xd2\x11"
+ "\x20\x9c\xa1\xee\x11\x44\x79\xd0"
+ "\xb2\x34\x77\xda\x10\x9a\x6f\x6f"
+ "\xef\x7c\xd9\xdc\x35\xb7\x61\xdd"
+ "\xf1\xa4\xc6\x1c\xbf\x05\x22\xac"
+ "\xfe\x2f\x85\x00\x44\xdf\x33\x16"
+ "\x35\xb6\xa3\xd3\x70\xdf\x69\x35"
+ "\x6a\xc7\xb4\x99\x45\x27\xc8\x8e"
+ "\x5a\x14\x30\xd0\x55\x3e\x4f\x64"
+ "\x0d\x38\xe3\xdf\x8b\xa8\x93\x26"
+ "\x75\xae\xf6\xb5\x23\x0b\x17\x31"
+ "\xbf\x27\xb8\xb5\x94\x31\xa7\x8f"
+ "\x43\xc4\x46\x24\x22\x4f\x8f\x7e"
+ "\xe5\xf4\x6d\x1e\x0e\x18\x7a\xbb"
+ "\xa6\x8f\xfb\x49\x49\xd8\x7e\x5a",
+ .rlen = 512,
+ }, {
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x62\x49\x77\x57\x24\x70\x93\x69"
+ "\x99\x59\x57\x49\x66\x96\x76\x27"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95"
+ "\x02\x88\x41\x97\x16\x93\x99\x37"
+ "\x51\x05\x82\x09\x74\x94\x45\x92",
+ .klen = 64,
+ .iv = "\xff\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .ilen = 512,
+ .result = "\xd7\x4b\x93\x7d\x13\xa2\xa2\xe1"
+ "\x35\x39\x71\x88\x76\x1e\xc9\xea"
+ "\x86\xad\xf3\x14\x48\x3d\x5e\xe9"
+ "\xe9\x2d\xb2\x56\x59\x35\x9d\xec"
+ "\x84\xfa\x7e\x9d\x6d\x33\x36\x8f"
+ "\xce\xf4\xa9\x21\x0b\x5f\x96\xec"
+ "\xcb\xf9\x57\x68\x33\x88\x39\xbf"
+ "\x2f\xbb\x59\x03\xbd\x66\x8b\x11"
+ "\x11\x65\x51\x2e\xb8\x67\x05\xd1"
+ "\x27\x11\x5c\xd4\xcc\x97\xc2\xb3"
+ "\xa9\x55\xaf\x07\x56\xd1\xdc\xf5"
+ "\x85\xdc\x46\xe6\xf0\x24\xeb\x93"
+ "\x4d\xf0\x9b\xf5\x73\x1c\xda\x03"
+ "\x22\xc8\x3a\x4f\xb4\x19\x91\x09"
+ "\x54\x0b\xf6\xfe\x17\x3d\x1a\x53"
+ "\x72\x60\x79\xcb\x0e\x32\x8a\x77"
+ "\xd5\xed\xdb\x33\xd7\x62\x16\x69"
+ "\x63\xe0\xab\xb5\xf6\x9c\x5f\x3d"
+ "\x69\x35\x61\x86\xf8\x86\xb9\x89"
+ "\x6e\x59\x35\xac\xf6\x6b\x33\xa0"
+ "\xea\xef\x96\x62\xd8\xa9\xcf\x56"
+ "\xbf\xdb\x8a\xfd\xa1\x82\x77\x73"
+ "\x3d\x94\x4a\x49\x42\x6d\x08\x60"
+ "\xa1\xea\xab\xb6\x88\x13\x94\xb8"
+ "\x51\x98\xdb\x35\x85\xdf\xf6\xb9"
+ "\x8f\xcd\xdf\x80\xd3\x40\x2d\x72"
+ "\xb8\xb2\x6c\x02\x43\x35\x22\x2a"
+ "\x31\xed\xcd\x16\x19\xdf\x62\x0f"
+ "\x29\xcf\x87\x04\xec\x02\x4f\xe4"
+ "\xa2\xed\x73\xc6\x69\xd3\x7e\x89"
+ "\x0b\x76\x10\x7c\xd6\xf9\x6a\x25"
+ "\xed\xcc\x60\x5d\x61\x20\xc1\x97"
+ "\x56\x91\x57\x28\xbe\x71\x0d\xcd"
+ "\xde\xc4\x9e\x55\x91\xbe\xd1\x28"
+ "\x9b\x90\xeb\x73\xf3\x68\x51\xc6"
+ "\xdf\x82\xcc\xd8\x1f\xce\x5b\x27"
+ "\xc0\x60\x5e\x33\xd6\xa7\x20\xea"
+ "\xb2\x54\xc7\x5d\x6a\x3b\x67\x47"
+ "\xcf\xa0\xe3\xab\x86\xaf\xc1\x42"
+ "\xe6\xb0\x23\x4a\xaf\x53\xdf\xa0"
+ "\xad\x12\x32\x31\x03\xf7\x21\xbe"
+ "\x2d\xd5\x82\x42\xb6\x4a\x3d\xcd"
+ "\xd8\x81\x77\xa9\x49\x98\x6c\x09"
+ "\xc5\xa3\x61\x12\x62\x85\x6b\xcd"
+ "\xb3\xf4\x20\x0c\x41\xc4\x05\x37"
+ "\x46\x5f\xeb\x71\x8b\xf1\xaf\x6e"
+ "\xba\xf3\x50\x2e\xfe\xa8\x37\xeb"
+ "\xe8\x8c\x4f\xa4\x0c\xf1\x31\xc8"
+ "\x6e\x71\x4f\xa5\xd7\x97\x73\xe0"
+ "\x93\x4a\x2f\xda\x7b\xe0\x20\x54"
+ "\x1f\x8d\x85\x79\x0b\x7b\x5e\x75"
+ "\xb9\x07\x67\xcc\xc8\xe7\x21\x15"
+ "\xa7\xc8\x98\xff\x4b\x80\x1c\x12"
+ "\xa8\x54\xe1\x38\x52\xe6\x74\x81"
+ "\x97\x47\xa1\x41\x0e\xc0\x50\xe3"
+ "\x55\x0e\xc3\xa7\x70\x77\xce\x07"
+ "\xed\x8c\x88\xe6\xa1\x5b\x14\xec"
+ "\xe6\xde\x06\x6d\x74\xc5\xd9\xfa"
+ "\xe5\x2f\x5a\xff\xc8\x05\xee\x27"
+ "\x35\x61\xbf\x0b\x19\x78\x9b\xd2"
+ "\x04\xc7\x05\xb1\x79\xb4\xff\x5f"
+ "\xf3\xea\x67\x52\x78\xc2\xce\x70"
+ "\xa4\x05\x0b\xb2\xb3\xa8\x30\x97"
+ "\x37\x30\xe1\x91\x8d\xb3\x2a\xff",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
+
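The XTS templates follow the same pattern, with inputs borrowed from the AES-XTS test vectors and the ciphertexts recomputed for Twofish. As a reference summary of the XTS construction (not part of the patch): the key material is split in half, K_1 for data and K_2 for the tweak, so .klen = 32 gives two 128-bit Twofish keys and .klen = 64 two 256-bit keys; the .iv field is the 128-bit sector tweak, and for block j within the sector

    T_0 = E_{K_2}(iv), \qquad T_{j+1} = T_j \otimes \alpha
    C_j = E_{K_1}(P_j \oplus T_j) \oplus T_j

where \otimes \alpha denotes multiplication by the primitive element of GF(2^{128}). As with LRW, the decryption template below swaps .input and .result and applies D_{K_1} instead of E_{K_1}.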
+static struct cipher_testvec tf_xts_dec_tv_template[] = {
+ /* Generated from AES-XTS test vectors */
+ /* same as enc vectors with input and result reversed */
+ {
+ .key = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x4b\xc9\x44\x4a\x11\xa3\xef\xac"
+ "\x30\x74\xe4\x44\x52\x77\x97\x43"
+ "\xa7\x60\xb2\x45\x2e\xf9\x00\x90"
+ "\x9f\xaa\xfd\x89\x6e\x9d\x4a\xe0",
+ .ilen = 32,
+ .result = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .rlen = 32,
+ }, {
+ .key = "\x11\x11\x11\x11\x11\x11\x11\x11"
+ "\x11\x11\x11\x11\x11\x11\x11\x11"
+ "\x22\x22\x22\x22\x22\x22\x22\x22"
+ "\x22\x22\x22\x22\x22\x22\x22\x22",
+ .klen = 32,
+ .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x57\x0e\x8f\xe5\x2a\x35\x61\x4f"
+ "\x32\xd3\xbd\x36\x05\x15\x44\x2c"
+ "\x58\x06\xf7\xf8\x00\xa8\xb6\xd5"
+ "\xc6\x28\x92\xdb\xd8\x34\xa2\xe9",
+ .ilen = 32,
+ .result = "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44",
+ .rlen = 32,
+ }, {
+ .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8"
+ "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0"
+ "\x22\x22\x22\x22\x22\x22\x22\x22"
+ "\x22\x22\x22\x22\x22\x22\x22\x22",
+ .klen = 32,
+ .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x96\x45\x8f\x8d\x7a\x75\xb1\xde"
+ "\x40\x0c\x89\x56\xf6\x4d\xa7\x07"
+ "\x38\xbb\x5b\xe9\xcd\x84\xae\xb2"
+ "\x7b\x6a\x62\xf4\x8c\xb5\x37\xea",
+ .ilen = 32,
+ .result = "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44",
+ .rlen = 32,
+ }, {
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\xa9\x78\xae\x1e\xea\xa2\x44\x4c"
+ "\xa2\x7a\x64\x1f\xaf\x46\xc1\xe0"
+ "\x6c\xb2\xf3\x92\x9a\xd6\x7d\x58"
+ "\xb8\x2d\xb9\x5d\x58\x07\x66\x50"
+ "\xea\x35\x35\x8c\xb2\x46\x61\x06"
+ "\x5d\x65\xfc\x57\x8f\x69\x74\xab"
+ "\x8a\x06\x69\xb5\x6c\xda\x66\xc7"
+ "\x52\x90\xbb\x8e\x6d\x8b\xb5\xa2"
+ "\x78\x1d\xc2\xa9\xc2\x73\x00\xc3"
+ "\x32\x36\x7c\x97\x6b\x4e\x8a\x50"
+ "\xe4\x91\x83\x96\x8f\xf4\x94\x1a"
+ "\xa6\x27\xe1\x33\xcb\x91\xc6\x5f"
+ "\x94\x75\xbc\xd7\x3e\x3e\x6f\x9e"
+ "\xa9\x31\x80\x5e\xe5\xdb\xc8\x53"
+ "\x01\x73\x68\x32\x25\x19\xfa\xfb"
+ "\xe4\xcf\xb9\x3e\xa2\xa0\x8f\x31"
+ "\xbf\x54\x06\x93\xa8\xb1\x0f\xb6"
+ "\x7c\x3c\xde\x6f\x0f\xfb\x0c\x11"
+ "\x39\x80\x39\x09\x97\x65\xf2\x83"
+ "\xae\xe6\xa1\x6f\x47\xb8\x49\xde"
+ "\x99\x36\x20\x7d\x97\x3b\xec\xfa"
+ "\xb4\x33\x6e\x7a\xc7\x46\x84\x49"
+ "\x91\xcd\xe1\x57\x0d\xed\x40\x08"
+ "\x13\xf1\x4e\x3e\xa4\xa4\x5c\xe6"
+ "\xd2\x0c\x20\x8f\x3e\xdf\x3f\x47"
+ "\x9a\x2f\xde\x6d\x66\xc9\x99\x4a"
+ "\x2d\x9e\x9d\x4b\x1a\x27\xa2\x12"
+ "\x99\xf0\xf8\xb1\xb6\xf6\x57\xc3"
+ "\xca\x1c\xa3\x8e\xed\x39\x28\xb5"
+ "\x10\x1b\x4b\x08\x42\x00\x4a\xd3"
+ "\xad\x5a\xc6\x8e\xc8\xbb\x95\xc4"
+ "\x4b\xaa\xfe\xd5\x42\xa8\xa3\x6d"
+ "\x3c\xf3\x34\x91\x2d\xb4\xdd\x20"
+ "\x0c\x90\x6d\xa3\x9b\x66\x9d\x24"
+ "\x02\xa6\xa9\x3f\x3f\x58\x5d\x47"
+ "\x24\x65\x63\x7e\xbd\x8c\xe6\x52"
+ "\x7d\xef\x33\x53\x63\xec\xaa\x0b"
+ "\x64\x15\xa9\xa6\x1f\x10\x00\x38"
+ "\x35\xa8\xe7\xbe\x23\x70\x22\xe0"
+ "\xd3\xb9\xe6\xfd\xe6\xaa\x03\x50"
+ "\xf3\x3c\x27\x36\x8b\xcc\xfe\x9c"
+ "\x9c\xa3\xb3\xe7\x68\x9b\xa2\x71"
+ "\xe0\x07\xd9\x1f\x68\x1f\xac\x5e"
+ "\x7a\x74\x85\xa9\x6a\x90\xab\x2c"
+ "\x38\x51\xbc\x1f\x43\x4a\x56\x1c"
+ "\xf8\x47\x03\x4e\x67\xa8\x1f\x99"
+ "\x04\x39\x73\x32\xb2\x86\x79\xe7"
+ "\x14\x28\x70\xb8\xe2\x7d\x69\x85"
+ "\xb6\x0f\xc5\xd0\xd0\x01\x5c\xe6"
+ "\x09\x0f\x75\xf7\xb6\x81\xd2\x11"
+ "\x20\x9c\xa1\xee\x11\x44\x79\xd0"
+ "\xb2\x34\x77\xda\x10\x9a\x6f\x6f"
+ "\xef\x7c\xd9\xdc\x35\xb7\x61\xdd"
+ "\xf1\xa4\xc6\x1c\xbf\x05\x22\xac"
+ "\xfe\x2f\x85\x00\x44\xdf\x33\x16"
+ "\x35\xb6\xa3\xd3\x70\xdf\x69\x35"
+ "\x6a\xc7\xb4\x99\x45\x27\xc8\x8e"
+ "\x5a\x14\x30\xd0\x55\x3e\x4f\x64"
+ "\x0d\x38\xe3\xdf\x8b\xa8\x93\x26"
+ "\x75\xae\xf6\xb5\x23\x0b\x17\x31"
+ "\xbf\x27\xb8\xb5\x94\x31\xa7\x8f"
+ "\x43\xc4\x46\x24\x22\x4f\x8f\x7e"
+ "\xe5\xf4\x6d\x1e\x0e\x18\x7a\xbb"
+ "\xa6\x8f\xfb\x49\x49\xd8\x7e\x5a",
+ .ilen = 512,
+ .result = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .rlen = 512,
+ }, {
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x62\x49\x77\x57\x24\x70\x93\x69"
+ "\x99\x59\x57\x49\x66\x96\x76\x27"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95"
+ "\x02\x88\x41\x97\x16\x93\x99\x37"
+ "\x51\x05\x82\x09\x74\x94\x45\x92",
+ .klen = 64,
+ .iv = "\xff\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\xd7\x4b\x93\x7d\x13\xa2\xa2\xe1"
+ "\x35\x39\x71\x88\x76\x1e\xc9\xea"
+ "\x86\xad\xf3\x14\x48\x3d\x5e\xe9"
+ "\xe9\x2d\xb2\x56\x59\x35\x9d\xec"
+ "\x84\xfa\x7e\x9d\x6d\x33\x36\x8f"
+ "\xce\xf4\xa9\x21\x0b\x5f\x96\xec"
+ "\xcb\xf9\x57\x68\x33\x88\x39\xbf"
+ "\x2f\xbb\x59\x03\xbd\x66\x8b\x11"
+ "\x11\x65\x51\x2e\xb8\x67\x05\xd1"
+ "\x27\x11\x5c\xd4\xcc\x97\xc2\xb3"
+ "\xa9\x55\xaf\x07\x56\xd1\xdc\xf5"
+ "\x85\xdc\x46\xe6\xf0\x24\xeb\x93"
+ "\x4d\xf0\x9b\xf5\x73\x1c\xda\x03"
+ "\x22\xc8\x3a\x4f\xb4\x19\x91\x09"
+ "\x54\x0b\xf6\xfe\x17\x3d\x1a\x53"
+ "\x72\x60\x79\xcb\x0e\x32\x8a\x77"
+ "\xd5\xed\xdb\x33\xd7\x62\x16\x69"
+ "\x63\xe0\xab\xb5\xf6\x9c\x5f\x3d"
+ "\x69\x35\x61\x86\xf8\x86\xb9\x89"
+ "\x6e\x59\x35\xac\xf6\x6b\x33\xa0"
+ "\xea\xef\x96\x62\xd8\xa9\xcf\x56"
+ "\xbf\xdb\x8a\xfd\xa1\x82\x77\x73"
+ "\x3d\x94\x4a\x49\x42\x6d\x08\x60"
+ "\xa1\xea\xab\xb6\x88\x13\x94\xb8"
+ "\x51\x98\xdb\x35\x85\xdf\xf6\xb9"
+ "\x8f\xcd\xdf\x80\xd3\x40\x2d\x72"
+ "\xb8\xb2\x6c\x02\x43\x35\x22\x2a"
+ "\x31\xed\xcd\x16\x19\xdf\x62\x0f"
+ "\x29\xcf\x87\x04\xec\x02\x4f\xe4"
+ "\xa2\xed\x73\xc6\x69\xd3\x7e\x89"
+ "\x0b\x76\x10\x7c\xd6\xf9\x6a\x25"
+ "\xed\xcc\x60\x5d\x61\x20\xc1\x97"
+ "\x56\x91\x57\x28\xbe\x71\x0d\xcd"
+ "\xde\xc4\x9e\x55\x91\xbe\xd1\x28"
+ "\x9b\x90\xeb\x73\xf3\x68\x51\xc6"
+ "\xdf\x82\xcc\xd8\x1f\xce\x5b\x27"
+ "\xc0\x60\x5e\x33\xd6\xa7\x20\xea"
+ "\xb2\x54\xc7\x5d\x6a\x3b\x67\x47"
+ "\xcf\xa0\xe3\xab\x86\xaf\xc1\x42"
+ "\xe6\xb0\x23\x4a\xaf\x53\xdf\xa0"
+ "\xad\x12\x32\x31\x03\xf7\x21\xbe"
+ "\x2d\xd5\x82\x42\xb6\x4a\x3d\xcd"
+ "\xd8\x81\x77\xa9\x49\x98\x6c\x09"
+ "\xc5\xa3\x61\x12\x62\x85\x6b\xcd"
+ "\xb3\xf4\x20\x0c\x41\xc4\x05\x37"
+ "\x46\x5f\xeb\x71\x8b\xf1\xaf\x6e"
+ "\xba\xf3\x50\x2e\xfe\xa8\x37\xeb"
+ "\xe8\x8c\x4f\xa4\x0c\xf1\x31\xc8"
+ "\x6e\x71\x4f\xa5\xd7\x97\x73\xe0"
+ "\x93\x4a\x2f\xda\x7b\xe0\x20\x54"
+ "\x1f\x8d\x85\x79\x0b\x7b\x5e\x75"
+ "\xb9\x07\x67\xcc\xc8\xe7\x21\x15"
+ "\xa7\xc8\x98\xff\x4b\x80\x1c\x12"
+ "\xa8\x54\xe1\x38\x52\xe6\x74\x81"
+ "\x97\x47\xa1\x41\x0e\xc0\x50\xe3"
+ "\x55\x0e\xc3\xa7\x70\x77\xce\x07"
+ "\xed\x8c\x88\xe6\xa1\x5b\x14\xec"
+ "\xe6\xde\x06\x6d\x74\xc5\xd9\xfa"
+ "\xe5\x2f\x5a\xff\xc8\x05\xee\x27"
+ "\x35\x61\xbf\x0b\x19\x78\x9b\xd2"
+ "\x04\xc7\x05\xb1\x79\xb4\xff\x5f"
+ "\xf3\xea\x67\x52\x78\xc2\xce\x70"
+ "\xa4\x05\x0b\xb2\xb3\xa8\x30\x97"
+ "\x37\x30\xe1\x91\x8d\xb3\x2a\xff",
+ .ilen = 512,
+ .result = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
},
};
@@ -2572,12 +9249,24 @@ static struct cipher_testvec tf_cbc_dec_tv_template[] = {
* Serpent test vectors. These are backwards because Serpent writes
* octet sequences in right-to-left mode.
*/
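(Editorial note, not part of the patch: the comment above refers to the reference Serpent code writing octet sequences right-to-left, so vectors taken from that convention have to be byte-reversed to line up with the left-to-right layout used in these templates; the reversed variant is what the TNEPRES templates below cover. A minimal standalone sketch of such a reversal, with the helper name and usage purely illustrative and not anything defined in testmgr:)

	/*
	 * Illustrative only: copy src into dst with the octet order
	 * reversed, so a right-to-left vector can be compared against
	 * the left-to-right byte layout used in the templates above.
	 */
	#include <stddef.h>

	static void reverse_octets(unsigned char *dst,
				   const unsigned char *src, size_t len)
	{
		size_t i;

		/* dst[0] receives the last octet of src, and so on. */
		for (i = 0; i < len; i++)
			dst[i] = src[len - 1 - i];
	}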
-#define SERPENT_ENC_TEST_VECTORS 4
-#define SERPENT_DEC_TEST_VECTORS 4
+#define SERPENT_ENC_TEST_VECTORS 5
+#define SERPENT_DEC_TEST_VECTORS 5
#define TNEPRES_ENC_TEST_VECTORS 4
#define TNEPRES_DEC_TEST_VECTORS 4
+#define SERPENT_CBC_ENC_TEST_VECTORS 1
+#define SERPENT_CBC_DEC_TEST_VECTORS 1
+
+#define SERPENT_CTR_ENC_TEST_VECTORS 2
+#define SERPENT_CTR_DEC_TEST_VECTORS 2
+
+#define SERPENT_LRW_ENC_TEST_VECTORS 8
+#define SERPENT_LRW_DEC_TEST_VECTORS 8
+
+#define SERPENT_XTS_ENC_TEST_VECTORS 5
+#define SERPENT_XTS_DEC_TEST_VECTORS 5
+
static struct cipher_testvec serpent_enc_tv_template[] = {
{
.input = "\x00\x01\x02\x03\x04\x05\x06\x07"
@@ -2616,6 +9305,141 @@ static struct cipher_testvec serpent_enc_tv_template[] = {
.result = "\xdd\xd2\x6b\x98\xa5\xff\xd8\x2c"
"\x05\x34\x5a\x9d\xad\xbf\xaf\x49",
.rlen = 16,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\xFB\xB0\x5D\xDE\xC0\xFE\xFC\xEB"
+ "\xB1\x80\x10\x43\xDE\x62\x70\xBD"
+ "\xFA\x8A\x93\xEA\x6B\xF7\xC5\xD7"
+ "\x0C\xD1\xBB\x29\x25\x14\x4C\x22"
+ "\x77\xA6\x38\x00\xDB\xB9\xE2\x07"
+ "\xD1\xAC\x82\xBA\xEA\x67\xAA\x39"
+ "\x99\x34\x89\x5B\x54\xE9\x12\x13"
+ "\x3B\x04\xE5\x12\x42\xC5\x79\xAB"
+ "\x0D\xC7\x3C\x58\x2D\xA3\x98\xF6"
+ "\xE4\x61\x9E\x17\x0B\xCE\xE8\xAA"
+ "\xB5\x6C\x1A\x3A\x67\x52\x81\x6A"
+ "\x04\xFF\x8A\x1B\x96\xFE\xE6\x87"
+ "\x3C\xD4\x39\x7D\x36\x9B\x03\xD5"
+ "\xB6\xA0\x75\x3C\x83\xE6\x1C\x73"
+ "\x9D\x74\x2B\x77\x53\x2D\xE5\xBD"
+ "\x69\xDA\x7A\x01\xF5\x6A\x70\x39"
+ "\x30\xD4\x2C\xF2\x8E\x06\x4B\x39"
+ "\xB3\x12\x1D\xB3\x17\x46\xE6\xD6"
+ "\xB6\x31\x36\x34\x38\x3C\x1D\x69"
+ "\x9F\x47\x28\x9A\x1D\x96\x70\x54"
+ "\x8E\x88\xCB\xE0\xF5\x6A\xAE\x0A"
+ "\x3C\xD5\x93\x1C\x21\xC9\x14\x3A"
+ "\x23\x9C\x9B\x79\xC7\x75\xC8\x39"
+ "\xA6\xAC\x65\x9A\x99\x37\xAF\x6D"
+ "\xBD\xB5\x32\xFD\xD8\x9C\x95\x7B"
+ "\xC6\x6A\x80\x64\xEA\xEF\x6D\x3F"
+ "\xA9\xFE\x5B\x16\xA3\xCF\x32\xC8"
+ "\xEF\x50\x22\x20\x93\x30\xBE\xE2"
+ "\x38\x05\x65\xAF\xBA\xB6\xE4\x72"
+ "\xA9\xEE\x05\x42\x88\xBD\x9D\x49"
+ "\xAD\x93\xCA\x4D\x45\x11\x43\x4D"
+ "\xB8\xF5\x74\x2B\x48\xE7\x21\xE4"
+ "\x4E\x3A\x4C\xDE\x65\x7A\x5A\xAD"
+ "\x86\xE6\x23\xEC\x6B\xA7\x17\xE6"
+ "\xF6\xA1\xAC\x29\xAE\xF9\x9B\x69"
+ "\x73\x65\x65\x51\xD6\x0B\x4E\x8C"
+ "\x17\x15\x9D\xB0\xCF\xB2\x42\x2B"
+ "\x51\xC3\x03\xE8\xB7\x7D\x2D\x39"
+ "\xE8\x10\x93\x16\xC8\x68\x4C\x60"
+ "\x87\x70\x14\xD0\x01\x57\xCB\x42"
+ "\x13\x59\xB1\x7F\x12\x4F\xBB\xC7"
+ "\xBD\x2B\xD4\xA9\x12\x26\x4F\xDE"
+ "\xFD\x72\xEC\xD7\x6F\x97\x14\x90"
+ "\x0E\x37\x13\xE6\x67\x1D\xE5\xFE"
+ "\x9E\x18\x3C\x8F\x3A\x3F\x59\x9B"
+ "\x71\x80\x05\x35\x3F\x40\x0B\x21"
+ "\x76\xE5\xEF\x42\x6C\xDB\x31\x05"
+ "\x5F\x05\xCF\x14\xE3\xF0\x61\xA2"
+ "\x49\x03\x5E\x77\x2E\x20\xBA\xA1"
+ "\xAF\x46\x51\xC0\x2B\xC4\x64\x1E"
+ "\x65\xCC\x51\x58\x0A\xDF\xF0\x5F"
+ "\x75\x9F\x48\xCD\x81\xEC\xC3\xF6"
+ "\xED\xC9\x4B\x7B\x4E\x26\x23\xE1"
+ "\xBB\xE9\x83\x0B\xCF\xE4\xDE\x00"
+ "\x48\xFF\xBF\x6C\xB4\x72\x16\xEF"
+ "\xC7\x46\xEE\x48\x8C\xB8\xAF\x45"
+ "\x91\x76\xE7\x6E\x65\x3D\x15\x86"
+ "\x10\xF8\xDB\x66\x97\x7C\x43\x4D"
+ "\x79\x12\x4E\xCE\x06\xD1\xD1\x6A"
+ "\x34\xC1\xC9\xF2\x28\x4A\xCD\x02"
+ "\x75\x55\x9B\xFF\x36\x73\xAB\x7C"
+ "\xF4\x46\x2E\xEB\xAC\xF3\xD2\xB7",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -2707,6 +9531,141 @@ static struct cipher_testvec serpent_dec_tv_template[] = {
.ilen = 16,
.result = zeroed_string,
.rlen = 16,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .input = "\xFB\xB0\x5D\xDE\xC0\xFE\xFC\xEB"
+ "\xB1\x80\x10\x43\xDE\x62\x70\xBD"
+ "\xFA\x8A\x93\xEA\x6B\xF7\xC5\xD7"
+ "\x0C\xD1\xBB\x29\x25\x14\x4C\x22"
+ "\x77\xA6\x38\x00\xDB\xB9\xE2\x07"
+ "\xD1\xAC\x82\xBA\xEA\x67\xAA\x39"
+ "\x99\x34\x89\x5B\x54\xE9\x12\x13"
+ "\x3B\x04\xE5\x12\x42\xC5\x79\xAB"
+ "\x0D\xC7\x3C\x58\x2D\xA3\x98\xF6"
+ "\xE4\x61\x9E\x17\x0B\xCE\xE8\xAA"
+ "\xB5\x6C\x1A\x3A\x67\x52\x81\x6A"
+ "\x04\xFF\x8A\x1B\x96\xFE\xE6\x87"
+ "\x3C\xD4\x39\x7D\x36\x9B\x03\xD5"
+ "\xB6\xA0\x75\x3C\x83\xE6\x1C\x73"
+ "\x9D\x74\x2B\x77\x53\x2D\xE5\xBD"
+ "\x69\xDA\x7A\x01\xF5\x6A\x70\x39"
+ "\x30\xD4\x2C\xF2\x8E\x06\x4B\x39"
+ "\xB3\x12\x1D\xB3\x17\x46\xE6\xD6"
+ "\xB6\x31\x36\x34\x38\x3C\x1D\x69"
+ "\x9F\x47\x28\x9A\x1D\x96\x70\x54"
+ "\x8E\x88\xCB\xE0\xF5\x6A\xAE\x0A"
+ "\x3C\xD5\x93\x1C\x21\xC9\x14\x3A"
+ "\x23\x9C\x9B\x79\xC7\x75\xC8\x39"
+ "\xA6\xAC\x65\x9A\x99\x37\xAF\x6D"
+ "\xBD\xB5\x32\xFD\xD8\x9C\x95\x7B"
+ "\xC6\x6A\x80\x64\xEA\xEF\x6D\x3F"
+ "\xA9\xFE\x5B\x16\xA3\xCF\x32\xC8"
+ "\xEF\x50\x22\x20\x93\x30\xBE\xE2"
+ "\x38\x05\x65\xAF\xBA\xB6\xE4\x72"
+ "\xA9\xEE\x05\x42\x88\xBD\x9D\x49"
+ "\xAD\x93\xCA\x4D\x45\x11\x43\x4D"
+ "\xB8\xF5\x74\x2B\x48\xE7\x21\xE4"
+ "\x4E\x3A\x4C\xDE\x65\x7A\x5A\xAD"
+ "\x86\xE6\x23\xEC\x6B\xA7\x17\xE6"
+ "\xF6\xA1\xAC\x29\xAE\xF9\x9B\x69"
+ "\x73\x65\x65\x51\xD6\x0B\x4E\x8C"
+ "\x17\x15\x9D\xB0\xCF\xB2\x42\x2B"
+ "\x51\xC3\x03\xE8\xB7\x7D\x2D\x39"
+ "\xE8\x10\x93\x16\xC8\x68\x4C\x60"
+ "\x87\x70\x14\xD0\x01\x57\xCB\x42"
+ "\x13\x59\xB1\x7F\x12\x4F\xBB\xC7"
+ "\xBD\x2B\xD4\xA9\x12\x26\x4F\xDE"
+ "\xFD\x72\xEC\xD7\x6F\x97\x14\x90"
+ "\x0E\x37\x13\xE6\x67\x1D\xE5\xFE"
+ "\x9E\x18\x3C\x8F\x3A\x3F\x59\x9B"
+ "\x71\x80\x05\x35\x3F\x40\x0B\x21"
+ "\x76\xE5\xEF\x42\x6C\xDB\x31\x05"
+ "\x5F\x05\xCF\x14\xE3\xF0\x61\xA2"
+ "\x49\x03\x5E\x77\x2E\x20\xBA\xA1"
+ "\xAF\x46\x51\xC0\x2B\xC4\x64\x1E"
+ "\x65\xCC\x51\x58\x0A\xDF\xF0\x5F"
+ "\x75\x9F\x48\xCD\x81\xEC\xC3\xF6"
+ "\xED\xC9\x4B\x7B\x4E\x26\x23\xE1"
+ "\xBB\xE9\x83\x0B\xCF\xE4\xDE\x00"
+ "\x48\xFF\xBF\x6C\xB4\x72\x16\xEF"
+ "\xC7\x46\xEE\x48\x8C\xB8\xAF\x45"
+ "\x91\x76\xE7\x6E\x65\x3D\x15\x86"
+ "\x10\xF8\xDB\x66\x97\x7C\x43\x4D"
+ "\x79\x12\x4E\xCE\x06\xD1\xD1\x6A"
+ "\x34\xC1\xC9\xF2\x28\x4A\xCD\x02"
+ "\x75\x55\x9B\xFF\x36\x73\xAB\x7C"
+ "\xF4\x46\x2E\xEB\xAC\xF3\xD2\xB7",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -2751,10 +9710,2311 @@ static struct cipher_testvec tnepres_dec_tv_template[] = {
},
};
+static struct cipher_testvec serpent_cbc_enc_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\x80\xCF\x11\x41\x1A\xB9\x4B\x9C"
+ "\xFF\xB7\x6C\xEA\xF0\xAF\x77\x6E"
+ "\x71\x75\x95\x9D\x4E\x1C\xCF\xAD"
+ "\x81\x34\xE9\x8F\xAE\x5A\x91\x1C"
+ "\x38\x63\x35\x7E\x79\x18\x0A\xE8"
+ "\x67\x06\x76\xD5\xFF\x22\x2F\xDA"
+ "\xB6\x2D\x57\x13\xB6\x3C\xBC\x97"
+ "\xFE\x53\x75\x35\x97\x7F\x51\xEA"
+ "\xDF\x5D\xE8\x9D\xCC\xD9\xAE\xE7"
+ "\x62\x67\xFF\x04\xC2\x18\x22\x5F"
+ "\x2E\x06\xC1\xE2\x26\xCD\xC6\x1E"
+ "\xE5\x2C\x4E\x87\x23\xDD\xF0\x41"
+ "\x08\xA5\xB4\x3E\x07\x1E\x0B\xBB"
+ "\x72\x84\xF8\x0A\x3F\x38\x5E\x91"
+ "\x15\x26\xE1\xDB\xA4\x3D\x74\xD2"
+ "\x41\x1E\x3F\xA9\xC6\x7D\x2A\xAB"
+ "\x27\xDF\x89\x1D\x86\x3E\xF7\x5A"
+ "\xF6\xE3\x0F\xC7\x6B\x4C\x96\x7C"
+ "\x2D\x12\xA5\x05\x92\xCB\xD7\x4A"
+ "\x4D\x1E\x88\x21\xE1\x63\xB4\xFC"
+ "\x4A\xF2\xCD\x35\xB9\xD7\x70\x97"
+ "\x5A\x5E\x7E\x96\x52\x20\xDC\x25"
+ "\xE9\x6B\x36\xB4\xE0\x98\x85\x2C"
+ "\x3C\xD2\xF7\x78\x8A\x73\x26\x9B"
+ "\xAF\x0B\x11\xE8\x4D\x67\x23\xE9"
+ "\x77\xDF\x58\xF6\x6F\x9E\xA4\xC5"
+ "\x10\xA1\x82\x0E\x80\xA0\x8F\x4B"
+ "\xA1\xC0\x12\x54\x4E\xC9\x20\x92"
+ "\x11\x00\x10\x4E\xB3\x7C\xCA\x63"
+ "\xE5\x3F\xD3\x41\x37\xCD\x74\xB7"
+ "\xA5\x7C\x61\xB8\x0B\x7A\x7F\x4D"
+ "\xFE\x96\x7D\x1B\xBE\x60\x37\xB7"
+ "\x81\x92\x66\x67\x15\x1E\x39\x98"
+ "\x52\xC0\xF4\x69\xC0\x99\x4F\x5A"
+ "\x2E\x32\xAD\x7C\x8B\xE9\xAD\x05"
+ "\x55\xF9\x0A\x1F\x97\x5C\xFA\x2B"
+ "\xF4\x99\x76\x3A\x6E\x4D\xE1\x4C"
+ "\x14\x4E\x6F\x87\xEE\x1A\x85\xA3"
+ "\x96\xC6\x66\x49\xDA\x0D\x71\xAC"
+ "\x04\x05\x46\xD3\x90\x0F\x64\x64"
+ "\x01\x66\x2C\x62\x5D\x34\xD1\xCB"
+ "\x3A\x24\xCE\x95\xEF\xAE\x2C\x97"
+ "\x0E\x0C\x1D\x36\x49\xEB\xE9\x3D"
+ "\x62\xA6\x19\x28\x9E\x26\xB4\x3F"
+ "\xD7\x55\x42\x3C\xCD\x72\x0A\xF0"
+ "\x7D\xE9\x95\x45\x86\xED\xB1\xE0"
+ "\x8D\xE9\xC5\x86\x13\x24\x28\x7D"
+ "\x74\xEF\xCA\x50\x12\x7E\x64\x8F"
+ "\x1B\xF5\x5B\xFE\xE2\xAC\xFA\xE7"
+ "\xBD\x38\x8C\x11\x20\xEF\xB1\xAA"
+ "\x7B\xE5\xE5\x78\xAD\x9D\x2D\xA2"
+ "\x8E\xDD\x48\xB3\xEF\x18\x92\x7E"
+ "\xE6\x75\x0D\x54\x64\x11\xA3\x3A"
+ "\xDB\x97\x0F\xD3\xDF\x07\xD3\x7E"
+ "\x1E\xD1\x87\xE4\x74\xBB\x46\xF4"
+ "\xBA\x23\x2D\x8D\x29\x07\x12\xCF"
+ "\x34\xCD\x72\x7F\x01\x30\xE7\xA0"
+ "\xF8\xDD\xA8\x08\xF0\xBC\xB1\xA2"
+ "\xCC\xE1\x6B\x5F\xBE\xEA\xF1\xE4"
+ "\x02\xC4\xAF\xFA\xAD\x31\xF4\xBF"
+ "\xFC\x66\xAA\x37\xF2\x37\x39\x6B"
+ "\xBC\x08\x3A\xA2\x29\xB3\xDF\xD1",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec serpent_cbc_dec_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x80\xCF\x11\x41\x1A\xB9\x4B\x9C"
+ "\xFF\xB7\x6C\xEA\xF0\xAF\x77\x6E"
+ "\x71\x75\x95\x9D\x4E\x1C\xCF\xAD"
+ "\x81\x34\xE9\x8F\xAE\x5A\x91\x1C"
+ "\x38\x63\x35\x7E\x79\x18\x0A\xE8"
+ "\x67\x06\x76\xD5\xFF\x22\x2F\xDA"
+ "\xB6\x2D\x57\x13\xB6\x3C\xBC\x97"
+ "\xFE\x53\x75\x35\x97\x7F\x51\xEA"
+ "\xDF\x5D\xE8\x9D\xCC\xD9\xAE\xE7"
+ "\x62\x67\xFF\x04\xC2\x18\x22\x5F"
+ "\x2E\x06\xC1\xE2\x26\xCD\xC6\x1E"
+ "\xE5\x2C\x4E\x87\x23\xDD\xF0\x41"
+ "\x08\xA5\xB4\x3E\x07\x1E\x0B\xBB"
+ "\x72\x84\xF8\x0A\x3F\x38\x5E\x91"
+ "\x15\x26\xE1\xDB\xA4\x3D\x74\xD2"
+ "\x41\x1E\x3F\xA9\xC6\x7D\x2A\xAB"
+ "\x27\xDF\x89\x1D\x86\x3E\xF7\x5A"
+ "\xF6\xE3\x0F\xC7\x6B\x4C\x96\x7C"
+ "\x2D\x12\xA5\x05\x92\xCB\xD7\x4A"
+ "\x4D\x1E\x88\x21\xE1\x63\xB4\xFC"
+ "\x4A\xF2\xCD\x35\xB9\xD7\x70\x97"
+ "\x5A\x5E\x7E\x96\x52\x20\xDC\x25"
+ "\xE9\x6B\x36\xB4\xE0\x98\x85\x2C"
+ "\x3C\xD2\xF7\x78\x8A\x73\x26\x9B"
+ "\xAF\x0B\x11\xE8\x4D\x67\x23\xE9"
+ "\x77\xDF\x58\xF6\x6F\x9E\xA4\xC5"
+ "\x10\xA1\x82\x0E\x80\xA0\x8F\x4B"
+ "\xA1\xC0\x12\x54\x4E\xC9\x20\x92"
+ "\x11\x00\x10\x4E\xB3\x7C\xCA\x63"
+ "\xE5\x3F\xD3\x41\x37\xCD\x74\xB7"
+ "\xA5\x7C\x61\xB8\x0B\x7A\x7F\x4D"
+ "\xFE\x96\x7D\x1B\xBE\x60\x37\xB7"
+ "\x81\x92\x66\x67\x15\x1E\x39\x98"
+ "\x52\xC0\xF4\x69\xC0\x99\x4F\x5A"
+ "\x2E\x32\xAD\x7C\x8B\xE9\xAD\x05"
+ "\x55\xF9\x0A\x1F\x97\x5C\xFA\x2B"
+ "\xF4\x99\x76\x3A\x6E\x4D\xE1\x4C"
+ "\x14\x4E\x6F\x87\xEE\x1A\x85\xA3"
+ "\x96\xC6\x66\x49\xDA\x0D\x71\xAC"
+ "\x04\x05\x46\xD3\x90\x0F\x64\x64"
+ "\x01\x66\x2C\x62\x5D\x34\xD1\xCB"
+ "\x3A\x24\xCE\x95\xEF\xAE\x2C\x97"
+ "\x0E\x0C\x1D\x36\x49\xEB\xE9\x3D"
+ "\x62\xA6\x19\x28\x9E\x26\xB4\x3F"
+ "\xD7\x55\x42\x3C\xCD\x72\x0A\xF0"
+ "\x7D\xE9\x95\x45\x86\xED\xB1\xE0"
+ "\x8D\xE9\xC5\x86\x13\x24\x28\x7D"
+ "\x74\xEF\xCA\x50\x12\x7E\x64\x8F"
+ "\x1B\xF5\x5B\xFE\xE2\xAC\xFA\xE7"
+ "\xBD\x38\x8C\x11\x20\xEF\xB1\xAA"
+ "\x7B\xE5\xE5\x78\xAD\x9D\x2D\xA2"
+ "\x8E\xDD\x48\xB3\xEF\x18\x92\x7E"
+ "\xE6\x75\x0D\x54\x64\x11\xA3\x3A"
+ "\xDB\x97\x0F\xD3\xDF\x07\xD3\x7E"
+ "\x1E\xD1\x87\xE4\x74\xBB\x46\xF4"
+ "\xBA\x23\x2D\x8D\x29\x07\x12\xCF"
+ "\x34\xCD\x72\x7F\x01\x30\xE7\xA0"
+ "\xF8\xDD\xA8\x08\xF0\xBC\xB1\xA2"
+ "\xCC\xE1\x6B\x5F\xBE\xEA\xF1\xE4"
+ "\x02\xC4\xAF\xFA\xAD\x31\xF4\xBF"
+ "\xFC\x66\xAA\x37\xF2\x37\x39\x6B"
+ "\xBC\x08\x3A\xA2\x29\xB3\xDF\xD1",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec serpent_ctr_enc_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\x84\x68\xEC\xF2\x1C\x88\x20\xCA"
+ "\x37\x69\xE3\x3A\x22\x85\x48\x46"
+ "\x70\xAA\x25\xB4\xCD\x8B\x04\x4E"
+ "\x8D\x15\x2B\x98\xDF\x7B\x6D\xB9"
+ "\xE0\x4A\x73\x00\x65\xB6\x1A\x0D"
+ "\x5C\x60\xDF\x34\xDC\x60\x4C\xDF"
+ "\xB5\x1F\x26\x8C\xDA\xC1\x11\xA8"
+ "\x80\xFA\x37\x7A\x89\xAA\xAE\x7B"
+ "\x92\x6E\xB9\xDC\xC9\x62\x4F\x88"
+ "\x0A\x5D\x97\x2F\x6B\xAC\x03\x7C"
+ "\x22\xF6\x55\x5A\xFA\x35\xA5\x17"
+ "\xA1\x5C\x5E\x2B\x63\x2D\xB9\x91"
+ "\x3E\x83\x26\x00\x4E\xD5\xBE\xCE"
+ "\x79\xC4\x3D\xFC\x70\xA0\xAD\x96"
+ "\xBA\x58\x2A\x1C\xDF\xC2\x3A\xA5"
+ "\x7C\xB5\x12\x89\xED\xBF\xB6\x09"
+ "\x13\x4F\x7D\x61\x3C\x5C\x27\xFC"
+ "\x5D\xE1\x4F\xA1\xEA\xB3\xCA\xB9"
+ "\xE6\xD0\x97\x81\xDE\xD1\xFB\x8A"
+ "\x30\xDB\xA3\x5D\xEC\x25\x0B\x86"
+ "\x71\xC8\xA7\x67\xE8\xBC\x7D\x4C"
+ "\xAE\x82\xD3\x73\x31\x09\xCB\xB3"
+ "\x4D\xD4\xC0\x8A\x2B\xFA\xA6\x55"
+ "\x39\x0A\xBC\x6E\x75\xAB\xC2\xE2"
+ "\x8A\xF2\x26\xCD\x63\x38\x35\xF7"
+ "\xAE\x12\x83\xCD\x8A\x9E\x7E\x4C"
+ "\xFE\x4D\xD7\xCE\x5C\x6E\x4C\xAF"
+ "\xE3\xCD\x76\xA7\x87\xA1\x54\x7C"
+ "\xEC\x32\xC7\x83\x2A\xFF\xF8\xEA"
+ "\x87\xB2\x47\xA3\x9D\xC2\x9C\xA2"
+ "\xB7\x2C\x7C\x1A\x24\xCB\x88\x61"
+ "\xFF\xA7\x1A\x16\x01\xDD\x4B\xFC"
+ "\x2E\xE0\x48\x67\x09\x42\xCC\x91"
+ "\xBE\x20\x38\xC0\x5E\x3B\x95\x00"
+ "\xA1\x96\x66\x0B\x8A\xE9\x9E\xF7"
+ "\x6B\x34\x0A\x51\xC0\x3B\xEB\x71"
+ "\x07\x97\x38\x4B\x5C\x56\x98\x67"
+ "\x78\x9C\xD0\x0E\x2B\xB5\x67\x90"
+ "\x75\xF8\xFE\x6D\x4E\x85\xCC\x0D"
+ "\x18\x06\x15\x9D\x5A\x10\x13\x37"
+ "\xA3\xD6\x68\xA2\xDF\x7E\xC7\x12"
+ "\xC9\x0D\x4D\x91\xB0\x2A\x55\xFF"
+ "\x6F\x73\x13\xDF\x28\xB5\x2A\x2C"
+ "\xE4\xFC\x20\xD9\xF1\x7A\x82\xB1"
+ "\xCB\x57\xB6\x3D\x8C\xF4\x8E\x27"
+ "\x37\xDC\x35\xF3\x79\x01\x53\xA4"
+ "\x7B\x37\xDE\x7C\x04\xAE\x50\xDB"
+ "\x9B\x1E\x8C\x07\xA7\x52\x49\x50"
+ "\x34\x25\x65\xDD\xA9\x8F\x7E\xBD"
+ "\x7A\xC9\x36\xAE\xDE\x21\x48\x64"
+ "\xC2\x02\xBA\xBE\x11\x1E\x3D\x9C"
+ "\x98\x52\xCC\x04\xBD\x5E\x61\x26"
+ "\x10\xD3\x21\xD9\x6E\x25\x98\x77"
+ "\x8E\x98\x63\xF6\xF6\x52\xFB\x13"
+ "\xAA\x30\xF2\xB9\xA4\x43\x53\x39"
+ "\x1C\x97\x07\x7E\x6B\xFF\x3D\x43"
+ "\xA6\x71\x6B\x66\x8F\x58\x3F\x71"
+ "\x90\x47\x40\x92\xE6\x69\xD1\x96"
+ "\x34\xB3\x3B\xE5\x43\xE4\xD5\x56"
+ "\xB2\xE6\x7E\x86\x7A\x12\x17\x5B"
+ "\x30\xF3\x9B\x0D\xFA\x57\xE4\x50"
+ "\x40\x53\x77\x8C\x15\xF8\x8D\x13",
+ .rlen = 496,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59",
+ .ilen = 499,
+ .result = "\x84\x68\xEC\xF2\x1C\x88\x20\xCA"
+ "\x37\x69\xE3\x3A\x22\x85\x48\x46"
+ "\x70\xAA\x25\xB4\xCD\x8B\x04\x4E"
+ "\x8D\x15\x2B\x98\xDF\x7B\x6D\xB9"
+ "\xE0\x4A\x73\x00\x65\xB6\x1A\x0D"
+ "\x5C\x60\xDF\x34\xDC\x60\x4C\xDF"
+ "\xB5\x1F\x26\x8C\xDA\xC1\x11\xA8"
+ "\x80\xFA\x37\x7A\x89\xAA\xAE\x7B"
+ "\x92\x6E\xB9\xDC\xC9\x62\x4F\x88"
+ "\x0A\x5D\x97\x2F\x6B\xAC\x03\x7C"
+ "\x22\xF6\x55\x5A\xFA\x35\xA5\x17"
+ "\xA1\x5C\x5E\x2B\x63\x2D\xB9\x91"
+ "\x3E\x83\x26\x00\x4E\xD5\xBE\xCE"
+ "\x79\xC4\x3D\xFC\x70\xA0\xAD\x96"
+ "\xBA\x58\x2A\x1C\xDF\xC2\x3A\xA5"
+ "\x7C\xB5\x12\x89\xED\xBF\xB6\x09"
+ "\x13\x4F\x7D\x61\x3C\x5C\x27\xFC"
+ "\x5D\xE1\x4F\xA1\xEA\xB3\xCA\xB9"
+ "\xE6\xD0\x97\x81\xDE\xD1\xFB\x8A"
+ "\x30\xDB\xA3\x5D\xEC\x25\x0B\x86"
+ "\x71\xC8\xA7\x67\xE8\xBC\x7D\x4C"
+ "\xAE\x82\xD3\x73\x31\x09\xCB\xB3"
+ "\x4D\xD4\xC0\x8A\x2B\xFA\xA6\x55"
+ "\x39\x0A\xBC\x6E\x75\xAB\xC2\xE2"
+ "\x8A\xF2\x26\xCD\x63\x38\x35\xF7"
+ "\xAE\x12\x83\xCD\x8A\x9E\x7E\x4C"
+ "\xFE\x4D\xD7\xCE\x5C\x6E\x4C\xAF"
+ "\xE3\xCD\x76\xA7\x87\xA1\x54\x7C"
+ "\xEC\x32\xC7\x83\x2A\xFF\xF8\xEA"
+ "\x87\xB2\x47\xA3\x9D\xC2\x9C\xA2"
+ "\xB7\x2C\x7C\x1A\x24\xCB\x88\x61"
+ "\xFF\xA7\x1A\x16\x01\xDD\x4B\xFC"
+ "\x2E\xE0\x48\x67\x09\x42\xCC\x91"
+ "\xBE\x20\x38\xC0\x5E\x3B\x95\x00"
+ "\xA1\x96\x66\x0B\x8A\xE9\x9E\xF7"
+ "\x6B\x34\x0A\x51\xC0\x3B\xEB\x71"
+ "\x07\x97\x38\x4B\x5C\x56\x98\x67"
+ "\x78\x9C\xD0\x0E\x2B\xB5\x67\x90"
+ "\x75\xF8\xFE\x6D\x4E\x85\xCC\x0D"
+ "\x18\x06\x15\x9D\x5A\x10\x13\x37"
+ "\xA3\xD6\x68\xA2\xDF\x7E\xC7\x12"
+ "\xC9\x0D\x4D\x91\xB0\x2A\x55\xFF"
+ "\x6F\x73\x13\xDF\x28\xB5\x2A\x2C"
+ "\xE4\xFC\x20\xD9\xF1\x7A\x82\xB1"
+ "\xCB\x57\xB6\x3D\x8C\xF4\x8E\x27"
+ "\x37\xDC\x35\xF3\x79\x01\x53\xA4"
+ "\x7B\x37\xDE\x7C\x04\xAE\x50\xDB"
+ "\x9B\x1E\x8C\x07\xA7\x52\x49\x50"
+ "\x34\x25\x65\xDD\xA9\x8F\x7E\xBD"
+ "\x7A\xC9\x36\xAE\xDE\x21\x48\x64"
+ "\xC2\x02\xBA\xBE\x11\x1E\x3D\x9C"
+ "\x98\x52\xCC\x04\xBD\x5E\x61\x26"
+ "\x10\xD3\x21\xD9\x6E\x25\x98\x77"
+ "\x8E\x98\x63\xF6\xF6\x52\xFB\x13"
+ "\xAA\x30\xF2\xB9\xA4\x43\x53\x39"
+ "\x1C\x97\x07\x7E\x6B\xFF\x3D\x43"
+ "\xA6\x71\x6B\x66\x8F\x58\x3F\x71"
+ "\x90\x47\x40\x92\xE6\x69\xD1\x96"
+ "\x34\xB3\x3B\xE5\x43\xE4\xD5\x56"
+ "\xB2\xE6\x7E\x86\x7A\x12\x17\x5B"
+ "\x30\xF3\x9B\x0D\xFA\x57\xE4\x50"
+ "\x40\x53\x77\x8C\x15\xF8\x8D\x13"
+ "\x38\xE2\xE5",
+ .rlen = 499,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 499 - 16, 16 },
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
+ "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\x06\x9A\xF8\xB4\x53\x88\x62\xFC"
+ "\x68\xB8\x2E\xDF\xC1\x05\x0F\x3D"
+ "\xAF\x4D\x95\xAE\xC4\xE9\x1C\xDC"
+ "\xF6\x2B\x8F\x90\x89\xF6\x7E\x1A"
+ "\xA6\xB9\xE4\xF4\xFA\xCA\xE5\x7E"
+ "\x71\x28\x06\x4F\xE8\x08\x39\xDA"
+ "\xA5\x0E\xC8\xC0\xB8\x16\xE5\x69"
+ "\xE5\xCA\xEC\x4F\x63\x2C\xC0\x9B"
+ "\x9F\x3E\x39\x79\xF0\xCD\x64\x35"
+ "\x4A\xD3\xC8\xA9\x31\xCD\x48\x5B"
+ "\x92\x3D\x8F\x3F\x96\xBD\xB3\x18"
+ "\x74\x2A\x5D\x29\x3F\x57\x8F\xE2"
+ "\x67\x9A\xE0\xE5\xD4\x4A\xE2\x47"
+ "\xBC\xF6\xEB\x14\xF3\x8C\x20\xC2"
+ "\x7D\xE2\x43\x81\x86\x72\x2E\xB1"
+ "\x39\xF6\x95\xE1\x1F\xCB\x76\x33"
+ "\x5B\x7D\x23\x0F\x3A\x67\x2A\x2F"
+ "\xB9\x37\x9D\xDD\x1F\x16\xA1\x3C"
+ "\x70\xFE\x52\xAA\x93\x3C\xC4\x46"
+ "\xB1\xE5\xFF\xDA\xAF\xE2\x84\xFE"
+ "\x25\x92\xB2\x63\xBD\x49\x77\xB4"
+ "\x22\xA4\x6A\xD5\x04\xE0\x45\x58"
+ "\x1C\x34\x96\x7C\x03\x0C\x13\xA2"
+ "\x05\x22\xE2\xCB\x5A\x35\x03\x09"
+ "\x40\xD2\x82\x05\xCA\x58\x73\xF2"
+ "\x29\x5E\x01\x47\x13\x32\x78\xBE"
+ "\x06\xB0\x51\xDB\x6C\x31\xA0\x1C"
+ "\x74\xBC\x8D\x25\xDF\xF8\x65\xD1"
+ "\x38\x35\x11\x26\x4A\xB4\x06\x32"
+ "\xFA\xD2\x07\x77\xB3\x74\x98\x80"
+ "\x61\x59\xA8\x9F\xF3\x6F\x2A\xBF"
+ "\xE6\xA5\x9A\xC4\x6B\xA6\x49\x6F"
+ "\xBC\x47\xD9\xFB\xC6\xEF\x25\x65"
+ "\x96\xAC\x9F\xE4\x81\x4B\xD8\xBA"
+ "\xD6\x9B\xC9\x6D\x58\x40\x81\x02"
+ "\x73\x44\x4E\x43\x6E\x37\xBB\x11"
+ "\xE3\xF9\xB8\x2F\xEC\x76\x34\xEA"
+ "\x90\xCD\xB7\x2E\x0E\x32\x71\xE8"
+ "\xBB\x4E\x0B\x98\xA4\x17\x17\x5B"
+ "\x07\xB5\x82\x3A\xC4\xE8\x42\x51"
+ "\x5A\x4C\x4E\x7D\xBF\xC4\xC0\x4F"
+ "\x68\xB8\xC6\x4A\x32\x6F\x0B\xD7"
+ "\x85\xED\x6B\xFB\x72\xD2\xA5\x8F"
+ "\xBF\xF9\xAC\x59\x50\xA8\x08\x70"
+ "\xEC\xBD\x0A\xBF\xE5\x87\xA1\xC2"
+ "\x92\x14\x78\xAF\xE8\xEA\x2E\xDD"
+ "\xC1\x03\x9A\xAA\x89\x8B\x32\x46"
+ "\x5B\x18\x27\xBA\x46\xAA\x64\xDE"
+ "\xE3\xD5\xA3\xFC\x7B\x5B\x61\xDB"
+ "\x7E\xDA\xEC\x30\x17\x19\xF8\x80"
+ "\xB5\x5E\x27\xB5\x37\x3A\x1F\x28"
+ "\x07\x73\xC3\x63\xCE\xFF\x8C\xFE"
+ "\x81\x4E\xF8\x24\xF3\xB8\xC7\xE8"
+ "\x16\x9A\xCC\x58\x2F\x88\x1C\x4B"
+ "\xBB\x33\xA2\x73\xF0\x1C\x89\x0E"
+ "\xDC\x34\x27\x89\x98\xCE\x1C\xA2"
+ "\xD8\xB8\x90\xBE\xEC\x72\x28\x13"
+ "\xAC\x7B\xF1\xD0\x7F\x7A\x28\x50"
+ "\xB7\x99\x65\x8A\xC9\xC6\x21\x34"
+ "\x7F\x67\x9D\xB7\x2C\xCC\xF5\x17"
+ "\x2B\x89\xAC\xB0\xD7\x1E\x47\xB0"
+ "\x61\xAF\xD4\x63\x6D\xB8\x2D\x20",
+ .rlen = 496,
+ },
+};
+
+static struct cipher_testvec serpent_ctr_dec_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x84\x68\xEC\xF2\x1C\x88\x20\xCA"
+ "\x37\x69\xE3\x3A\x22\x85\x48\x46"
+ "\x70\xAA\x25\xB4\xCD\x8B\x04\x4E"
+ "\x8D\x15\x2B\x98\xDF\x7B\x6D\xB9"
+ "\xE0\x4A\x73\x00\x65\xB6\x1A\x0D"
+ "\x5C\x60\xDF\x34\xDC\x60\x4C\xDF"
+ "\xB5\x1F\x26\x8C\xDA\xC1\x11\xA8"
+ "\x80\xFA\x37\x7A\x89\xAA\xAE\x7B"
+ "\x92\x6E\xB9\xDC\xC9\x62\x4F\x88"
+ "\x0A\x5D\x97\x2F\x6B\xAC\x03\x7C"
+ "\x22\xF6\x55\x5A\xFA\x35\xA5\x17"
+ "\xA1\x5C\x5E\x2B\x63\x2D\xB9\x91"
+ "\x3E\x83\x26\x00\x4E\xD5\xBE\xCE"
+ "\x79\xC4\x3D\xFC\x70\xA0\xAD\x96"
+ "\xBA\x58\x2A\x1C\xDF\xC2\x3A\xA5"
+ "\x7C\xB5\x12\x89\xED\xBF\xB6\x09"
+ "\x13\x4F\x7D\x61\x3C\x5C\x27\xFC"
+ "\x5D\xE1\x4F\xA1\xEA\xB3\xCA\xB9"
+ "\xE6\xD0\x97\x81\xDE\xD1\xFB\x8A"
+ "\x30\xDB\xA3\x5D\xEC\x25\x0B\x86"
+ "\x71\xC8\xA7\x67\xE8\xBC\x7D\x4C"
+ "\xAE\x82\xD3\x73\x31\x09\xCB\xB3"
+ "\x4D\xD4\xC0\x8A\x2B\xFA\xA6\x55"
+ "\x39\x0A\xBC\x6E\x75\xAB\xC2\xE2"
+ "\x8A\xF2\x26\xCD\x63\x38\x35\xF7"
+ "\xAE\x12\x83\xCD\x8A\x9E\x7E\x4C"
+ "\xFE\x4D\xD7\xCE\x5C\x6E\x4C\xAF"
+ "\xE3\xCD\x76\xA7\x87\xA1\x54\x7C"
+ "\xEC\x32\xC7\x83\x2A\xFF\xF8\xEA"
+ "\x87\xB2\x47\xA3\x9D\xC2\x9C\xA2"
+ "\xB7\x2C\x7C\x1A\x24\xCB\x88\x61"
+ "\xFF\xA7\x1A\x16\x01\xDD\x4B\xFC"
+ "\x2E\xE0\x48\x67\x09\x42\xCC\x91"
+ "\xBE\x20\x38\xC0\x5E\x3B\x95\x00"
+ "\xA1\x96\x66\x0B\x8A\xE9\x9E\xF7"
+ "\x6B\x34\x0A\x51\xC0\x3B\xEB\x71"
+ "\x07\x97\x38\x4B\x5C\x56\x98\x67"
+ "\x78\x9C\xD0\x0E\x2B\xB5\x67\x90"
+ "\x75\xF8\xFE\x6D\x4E\x85\xCC\x0D"
+ "\x18\x06\x15\x9D\x5A\x10\x13\x37"
+ "\xA3\xD6\x68\xA2\xDF\x7E\xC7\x12"
+ "\xC9\x0D\x4D\x91\xB0\x2A\x55\xFF"
+ "\x6F\x73\x13\xDF\x28\xB5\x2A\x2C"
+ "\xE4\xFC\x20\xD9\xF1\x7A\x82\xB1"
+ "\xCB\x57\xB6\x3D\x8C\xF4\x8E\x27"
+ "\x37\xDC\x35\xF3\x79\x01\x53\xA4"
+ "\x7B\x37\xDE\x7C\x04\xAE\x50\xDB"
+ "\x9B\x1E\x8C\x07\xA7\x52\x49\x50"
+ "\x34\x25\x65\xDD\xA9\x8F\x7E\xBD"
+ "\x7A\xC9\x36\xAE\xDE\x21\x48\x64"
+ "\xC2\x02\xBA\xBE\x11\x1E\x3D\x9C"
+ "\x98\x52\xCC\x04\xBD\x5E\x61\x26"
+ "\x10\xD3\x21\xD9\x6E\x25\x98\x77"
+ "\x8E\x98\x63\xF6\xF6\x52\xFB\x13"
+ "\xAA\x30\xF2\xB9\xA4\x43\x53\x39"
+ "\x1C\x97\x07\x7E\x6B\xFF\x3D\x43"
+ "\xA6\x71\x6B\x66\x8F\x58\x3F\x71"
+ "\x90\x47\x40\x92\xE6\x69\xD1\x96"
+ "\x34\xB3\x3B\xE5\x43\xE4\xD5\x56"
+ "\xB2\xE6\x7E\x86\x7A\x12\x17\x5B"
+ "\x30\xF3\x9B\x0D\xFA\x57\xE4\x50"
+ "\x40\x53\x77\x8C\x15\xF8\x8D\x13",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x84\x68\xEC\xF2\x1C\x88\x20\xCA"
+ "\x37\x69\xE3\x3A\x22\x85\x48\x46"
+ "\x70\xAA\x25\xB4\xCD\x8B\x04\x4E"
+ "\x8D\x15\x2B\x98\xDF\x7B\x6D\xB9"
+ "\xE0\x4A\x73\x00\x65\xB6\x1A\x0D"
+ "\x5C\x60\xDF\x34\xDC\x60\x4C\xDF"
+ "\xB5\x1F\x26\x8C\xDA\xC1\x11\xA8"
+ "\x80\xFA\x37\x7A\x89\xAA\xAE\x7B"
+ "\x92\x6E\xB9\xDC\xC9\x62\x4F\x88"
+ "\x0A\x5D\x97\x2F\x6B\xAC\x03\x7C"
+ "\x22\xF6\x55\x5A\xFA\x35\xA5\x17"
+ "\xA1\x5C\x5E\x2B\x63\x2D\xB9\x91"
+ "\x3E\x83\x26\x00\x4E\xD5\xBE\xCE"
+ "\x79\xC4\x3D\xFC\x70\xA0\xAD\x96"
+ "\xBA\x58\x2A\x1C\xDF\xC2\x3A\xA5"
+ "\x7C\xB5\x12\x89\xED\xBF\xB6\x09"
+ "\x13\x4F\x7D\x61\x3C\x5C\x27\xFC"
+ "\x5D\xE1\x4F\xA1\xEA\xB3\xCA\xB9"
+ "\xE6\xD0\x97\x81\xDE\xD1\xFB\x8A"
+ "\x30\xDB\xA3\x5D\xEC\x25\x0B\x86"
+ "\x71\xC8\xA7\x67\xE8\xBC\x7D\x4C"
+ "\xAE\x82\xD3\x73\x31\x09\xCB\xB3"
+ "\x4D\xD4\xC0\x8A\x2B\xFA\xA6\x55"
+ "\x39\x0A\xBC\x6E\x75\xAB\xC2\xE2"
+ "\x8A\xF2\x26\xCD\x63\x38\x35\xF7"
+ "\xAE\x12\x83\xCD\x8A\x9E\x7E\x4C"
+ "\xFE\x4D\xD7\xCE\x5C\x6E\x4C\xAF"
+ "\xE3\xCD\x76\xA7\x87\xA1\x54\x7C"
+ "\xEC\x32\xC7\x83\x2A\xFF\xF8\xEA"
+ "\x87\xB2\x47\xA3\x9D\xC2\x9C\xA2"
+ "\xB7\x2C\x7C\x1A\x24\xCB\x88\x61"
+ "\xFF\xA7\x1A\x16\x01\xDD\x4B\xFC"
+ "\x2E\xE0\x48\x67\x09\x42\xCC\x91"
+ "\xBE\x20\x38\xC0\x5E\x3B\x95\x00"
+ "\xA1\x96\x66\x0B\x8A\xE9\x9E\xF7"
+ "\x6B\x34\x0A\x51\xC0\x3B\xEB\x71"
+ "\x07\x97\x38\x4B\x5C\x56\x98\x67"
+ "\x78\x9C\xD0\x0E\x2B\xB5\x67\x90"
+ "\x75\xF8\xFE\x6D\x4E\x85\xCC\x0D"
+ "\x18\x06\x15\x9D\x5A\x10\x13\x37"
+ "\xA3\xD6\x68\xA2\xDF\x7E\xC7\x12"
+ "\xC9\x0D\x4D\x91\xB0\x2A\x55\xFF"
+ "\x6F\x73\x13\xDF\x28\xB5\x2A\x2C"
+ "\xE4\xFC\x20\xD9\xF1\x7A\x82\xB1"
+ "\xCB\x57\xB6\x3D\x8C\xF4\x8E\x27"
+ "\x37\xDC\x35\xF3\x79\x01\x53\xA4"
+ "\x7B\x37\xDE\x7C\x04\xAE\x50\xDB"
+ "\x9B\x1E\x8C\x07\xA7\x52\x49\x50"
+ "\x34\x25\x65\xDD\xA9\x8F\x7E\xBD"
+ "\x7A\xC9\x36\xAE\xDE\x21\x48\x64"
+ "\xC2\x02\xBA\xBE\x11\x1E\x3D\x9C"
+ "\x98\x52\xCC\x04\xBD\x5E\x61\x26"
+ "\x10\xD3\x21\xD9\x6E\x25\x98\x77"
+ "\x8E\x98\x63\xF6\xF6\x52\xFB\x13"
+ "\xAA\x30\xF2\xB9\xA4\x43\x53\x39"
+ "\x1C\x97\x07\x7E\x6B\xFF\x3D\x43"
+ "\xA6\x71\x6B\x66\x8F\x58\x3F\x71"
+ "\x90\x47\x40\x92\xE6\x69\xD1\x96"
+ "\x34\xB3\x3B\xE5\x43\xE4\xD5\x56"
+ "\xB2\xE6\x7E\x86\x7A\x12\x17\x5B"
+ "\x30\xF3\x9B\x0D\xFA\x57\xE4\x50"
+ "\x40\x53\x77\x8C\x15\xF8\x8D\x13"
+ "\x38\xE2\xE5",
+ .ilen = 499,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59",
+ .rlen = 499,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 499 - 16, 16 },
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
+ "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x06\x9A\xF8\xB4\x53\x88\x62\xFC"
+ "\x68\xB8\x2E\xDF\xC1\x05\x0F\x3D"
+ "\xAF\x4D\x95\xAE\xC4\xE9\x1C\xDC"
+ "\xF6\x2B\x8F\x90\x89\xF6\x7E\x1A"
+ "\xA6\xB9\xE4\xF4\xFA\xCA\xE5\x7E"
+ "\x71\x28\x06\x4F\xE8\x08\x39\xDA"
+ "\xA5\x0E\xC8\xC0\xB8\x16\xE5\x69"
+ "\xE5\xCA\xEC\x4F\x63\x2C\xC0\x9B"
+ "\x9F\x3E\x39\x79\xF0\xCD\x64\x35"
+ "\x4A\xD3\xC8\xA9\x31\xCD\x48\x5B"
+ "\x92\x3D\x8F\x3F\x96\xBD\xB3\x18"
+ "\x74\x2A\x5D\x29\x3F\x57\x8F\xE2"
+ "\x67\x9A\xE0\xE5\xD4\x4A\xE2\x47"
+ "\xBC\xF6\xEB\x14\xF3\x8C\x20\xC2"
+ "\x7D\xE2\x43\x81\x86\x72\x2E\xB1"
+ "\x39\xF6\x95\xE1\x1F\xCB\x76\x33"
+ "\x5B\x7D\x23\x0F\x3A\x67\x2A\x2F"
+ "\xB9\x37\x9D\xDD\x1F\x16\xA1\x3C"
+ "\x70\xFE\x52\xAA\x93\x3C\xC4\x46"
+ "\xB1\xE5\xFF\xDA\xAF\xE2\x84\xFE"
+ "\x25\x92\xB2\x63\xBD\x49\x77\xB4"
+ "\x22\xA4\x6A\xD5\x04\xE0\x45\x58"
+ "\x1C\x34\x96\x7C\x03\x0C\x13\xA2"
+ "\x05\x22\xE2\xCB\x5A\x35\x03\x09"
+ "\x40\xD2\x82\x05\xCA\x58\x73\xF2"
+ "\x29\x5E\x01\x47\x13\x32\x78\xBE"
+ "\x06\xB0\x51\xDB\x6C\x31\xA0\x1C"
+ "\x74\xBC\x8D\x25\xDF\xF8\x65\xD1"
+ "\x38\x35\x11\x26\x4A\xB4\x06\x32"
+ "\xFA\xD2\x07\x77\xB3\x74\x98\x80"
+ "\x61\x59\xA8\x9F\xF3\x6F\x2A\xBF"
+ "\xE6\xA5\x9A\xC4\x6B\xA6\x49\x6F"
+ "\xBC\x47\xD9\xFB\xC6\xEF\x25\x65"
+ "\x96\xAC\x9F\xE4\x81\x4B\xD8\xBA"
+ "\xD6\x9B\xC9\x6D\x58\x40\x81\x02"
+ "\x73\x44\x4E\x43\x6E\x37\xBB\x11"
+ "\xE3\xF9\xB8\x2F\xEC\x76\x34\xEA"
+ "\x90\xCD\xB7\x2E\x0E\x32\x71\xE8"
+ "\xBB\x4E\x0B\x98\xA4\x17\x17\x5B"
+ "\x07\xB5\x82\x3A\xC4\xE8\x42\x51"
+ "\x5A\x4C\x4E\x7D\xBF\xC4\xC0\x4F"
+ "\x68\xB8\xC6\x4A\x32\x6F\x0B\xD7"
+ "\x85\xED\x6B\xFB\x72\xD2\xA5\x8F"
+ "\xBF\xF9\xAC\x59\x50\xA8\x08\x70"
+ "\xEC\xBD\x0A\xBF\xE5\x87\xA1\xC2"
+ "\x92\x14\x78\xAF\xE8\xEA\x2E\xDD"
+ "\xC1\x03\x9A\xAA\x89\x8B\x32\x46"
+ "\x5B\x18\x27\xBA\x46\xAA\x64\xDE"
+ "\xE3\xD5\xA3\xFC\x7B\x5B\x61\xDB"
+ "\x7E\xDA\xEC\x30\x17\x19\xF8\x80"
+ "\xB5\x5E\x27\xB5\x37\x3A\x1F\x28"
+ "\x07\x73\xC3\x63\xCE\xFF\x8C\xFE"
+ "\x81\x4E\xF8\x24\xF3\xB8\xC7\xE8"
+ "\x16\x9A\xCC\x58\x2F\x88\x1C\x4B"
+ "\xBB\x33\xA2\x73\xF0\x1C\x89\x0E"
+ "\xDC\x34\x27\x89\x98\xCE\x1C\xA2"
+ "\xD8\xB8\x90\xBE\xEC\x72\x28\x13"
+ "\xAC\x7B\xF1\xD0\x7F\x7A\x28\x50"
+ "\xB7\x99\x65\x8A\xC9\xC6\x21\x34"
+ "\x7F\x67\x9D\xB7\x2C\xCC\xF5\x17"
+ "\x2B\x89\xAC\xB0\xD7\x1E\x47\xB0"
+ "\x61\xAF\xD4\x63\x6D\xB8\x2D\x20",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ },
+};
+
+static struct cipher_testvec serpent_lrw_enc_tv_template[] = {
+ /* Generated from AES-LRW test vectors */
+ {
+ .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d"
+ "\x4c\x26\x84\x14\xb5\x68\x01\x85"
+ "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03"
+ "\xee\x5a\x83\x0c\xcc\x09\x4c\x87",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x6f\xbf\xd4\xa4\x5d\x71\x16\x79"
+ "\x63\x9c\xa6\x8e\x40\xbe\x0d\x8a",
+ .rlen = 16,
+ }, {
+ .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c"
+ "\xd7\x79\xe8\x0f\x54\x88\x79\x44"
+ "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea"
+ "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x02",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\xfd\xb2\x66\x98\x80\x96\x55\xad"
+ "\x08\x94\x54\x9c\x21\x7c\x69\xe3",
+ .rlen = 16,
+ }, {
+ .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50"
+ "\x30\xfe\x69\xe2\x37\x7f\x98\x47"
+ "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6"
+ "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x14\x5e\x3d\x70\xc0\x6e\x9c\x34"
+ "\x5b\x5e\xcf\x0f\xe4\x8c\x21\x5c",
+ .rlen = 16,
+ }, {
+ .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15"
+ "\x25\x83\xf7\x3c\x1f\x01\x28\x74"
+ "\xca\xc6\xbc\x35\x4d\x4a\x65\x54"
+ "\x90\xae\x61\xcf\x7b\xae\xbd\xcc"
+ "\xad\xe4\x94\xc5\x4a\x29\xae\x70",
+ .klen = 40,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x25\x39\xaa\xa5\xf0\x65\xc8\xdc"
+ "\x5d\x45\x95\x30\x8f\xff\x2f\x1b",
+ .rlen = 16,
+ }, {
+ .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff"
+ "\xf8\x86\xce\xac\x93\xc5\xad\xc6"
+ "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd"
+ "\x52\x13\xb2\xb7\xf0\xff\x11\xd8"
+ "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f",
+ .klen = 40,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x0c\x20\x20\x63\xd6\x8b\xfc\x8f"
+ "\xc0\xe2\x17\xbb\xd2\x59\x6f\x26",
+ .rlen = 16,
+ }, {
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\xc1\x35\x2e\x53\xf0\x96\x4d\x9c"
+ "\x2e\x18\xe6\x99\xcd\xd3\x15\x68",
+ .rlen = 16,
+ }, {
+ .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d"
+ "\xd4\x70\x98\x0b\xc7\x95\x84\xc8"
+ "\xb2\xfb\x64\xce\x60\x97\x87\x8d"
+ "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7"
+ "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4"
+ "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x86\x0a\xc6\xa9\x1a\x9f\xe7\xe6"
+ "\x64\x3b\x33\xd6\xd5\x84\xd6\xdf",
+ .rlen = 16,
+ }, {
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x05\x11\xb7\x18\xab\xc6\x2d\xac"
+ "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c"
+ "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8"
+ "\x50\x38\x1f\x71\x49\xb6\x57\xd6"
+ "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90"
+ "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6"
+ "\xad\x1e\x9e\x20\x5f\x38\xbe\x04"
+ "\xda\x10\x8e\xed\xa2\xa4\x87\xab"
+ "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c"
+ "\xc9\xac\x42\x31\x95\x7c\xc9\x04"
+ "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6"
+ "\x15\xd7\x3f\x4f\x2f\x66\x69\x03"
+ "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65"
+ "\x4c\x96\x12\xed\x7c\x92\x03\x01"
+ "\x6f\xbc\x35\x93\xac\xf1\x27\xf1"
+ "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50"
+ "\x89\xa4\x8e\x66\x44\x85\xcc\xfd"
+ "\x33\x14\x70\xe3\x96\xb2\xc3\xd3"
+ "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5"
+ "\x2d\x64\x75\xdd\xb4\x54\xe6\x74"
+ "\x8c\xd3\x9d\x9e\x86\xab\x51\x53"
+ "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40"
+ "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5"
+ "\x76\x12\x73\x44\x1a\x56\xd7\x72"
+ "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda"
+ "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd"
+ "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60"
+ "\x1a\xe2\x70\x85\x58\xc2\x1b\x09"
+ "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9"
+ "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8"
+ "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8"
+ "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10"
+ "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1"
+ "\x90\x3e\x76\x4a\x74\xa4\x21\x2c"
+ "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e"
+ "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f"
+ "\x8d\x23\x31\x74\x84\xeb\x88\x6e"
+ "\xcc\xb9\xbc\x22\x83\x19\x07\x22"
+ "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78"
+ "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5"
+ "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41"
+ "\x3c\xce\x8f\x42\x60\x71\xa7\x75"
+ "\x08\x40\x65\x8a\x82\xbf\xf5\x43"
+ "\x71\x96\xa9\x4d\x44\x8a\x20\xbe"
+ "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65"
+ "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9"
+ "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4"
+ "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a"
+ "\x62\x73\x65\xfd\x46\x63\x25\x3d"
+ "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf"
+ "\x24\xf3\xb4\xac\x64\xba\xdf\x4b"
+ "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7"
+ "\xc5\x68\x77\x84\x32\x2b\xcc\x85"
+ "\x74\x96\xf0\x12\x77\x61\xb9\xeb"
+ "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8"
+ "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24"
+ "\xda\x39\x87\x45\xc0\x2b\xbb\x01"
+ "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce"
+ "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6"
+ "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32"
+ "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45"
+ "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6"
+ "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4"
+ "\x21\xc4\xc2\x75\x67\x89\x37\x0a",
+ .ilen = 512,
+ .result = "\xe3\x5a\x38\x0f\x4d\x92\x3a\x74"
+ "\x15\xb1\x50\x8c\x9a\xd8\x99\x1d"
+ "\x82\xec\xf1\x5f\x03\x6d\x02\x58"
+ "\x90\x67\xfc\xdd\x8d\xe1\x38\x08"
+ "\x7b\xc9\x9b\x4b\x04\x09\x50\x15"
+ "\xce\xab\xda\x33\x30\x20\x12\xfa"
+ "\x83\xc4\xa6\x9a\x2e\x7d\x90\xd9"
+ "\xa6\xa6\x67\x43\xb4\xa7\xa8\x5c"
+ "\xbb\x6a\x49\x2b\x8b\xf8\xd0\x22"
+ "\xe5\x9e\xba\xe8\x8c\x67\xb8\x5b"
+ "\x60\xbc\xf5\xa4\x95\x4e\x66\xe5"
+ "\x6d\x8e\xa9\xf6\x65\x2e\x04\xf5"
+ "\xba\xb5\xdb\x88\xc2\xf6\x7a\x4b"
+ "\x89\x58\x7c\x9a\xae\x26\xe8\xb7"
+ "\xb7\x28\xcc\xd6\xcc\xa5\x98\x4d"
+ "\xb9\x91\xcb\xb4\xe4\x8b\x96\x47"
+ "\x5f\x03\x8b\xdd\x94\xd1\xee\x12"
+ "\xa7\x83\x80\xf2\xc1\x15\x74\x4f"
+ "\x49\xf9\xb0\x7e\x6f\xdc\x73\x2f"
+ "\xe2\xcf\xe0\x1b\x34\xa5\xa0\x52"
+ "\xfb\x3c\x5d\x85\x91\xe6\x6d\x98"
+ "\x04\xd6\xdd\x4c\x00\x64\xd9\x54"
+ "\x5c\x3c\x08\x1d\x4c\x06\x9f\xb8"
+ "\x1c\x4d\x8d\xdc\xa4\x3c\xb9\x3b"
+ "\x9e\x85\xce\xc3\xa8\x4a\x0c\xd9"
+ "\x04\xc3\x6f\x17\x66\xa9\x1f\x59"
+ "\xd9\xe2\x19\x36\xa3\x88\xb8\x0b"
+ "\x0f\x4a\x4d\xf8\xc8\x6f\xd5\x43"
+ "\xeb\xa0\xab\x1f\x61\xc0\x06\xeb"
+ "\x93\xb7\xb8\x6f\x0d\xbd\x07\x49"
+ "\xb3\xac\x5d\xcf\x31\xa0\x27\x26"
+ "\x21\xbe\x94\x2e\x19\xea\xf4\xee"
+ "\xb5\x13\x89\xf7\x94\x0b\xef\x59"
+ "\x44\xc5\x78\x8b\x3c\x3b\x71\x20"
+ "\xf9\x35\x0c\x70\x74\xdc\x5b\xc2"
+ "\xb4\x11\x0e\x2c\x61\xa1\x52\x46"
+ "\x18\x11\x16\xc6\x86\x44\xa7\xaf"
+ "\xd5\x0c\x7d\xa6\x9e\x25\x2d\x1b"
+ "\x9a\x8f\x0f\xf8\x6a\x61\xa0\xea"
+ "\x3f\x0e\x90\xd6\x8f\x83\x30\x64"
+ "\xb5\x51\x2d\x08\x3c\xcd\x99\x36"
+ "\x96\xd4\xb1\xb5\x48\x30\xca\x48"
+ "\xf7\x11\xa8\xf5\x97\x8a\x6a\x6d"
+ "\x12\x33\x2f\xc0\xe8\xda\xec\x8a"
+ "\xe1\x88\x72\x63\xde\x20\xa3\xe1"
+ "\x8e\xac\x84\x37\x35\xf5\xf7\x3f"
+ "\x00\x02\x0e\xe4\xc1\x53\x68\x3f"
+ "\xaa\xd5\xac\x52\x3d\x20\x2f\x4d"
+ "\x7c\x83\xd0\xbd\xaa\x97\x35\x36"
+ "\x98\x88\x59\x5d\xe7\x24\xe3\x90"
+ "\x9d\x30\x47\xa7\xc3\x60\x35\xf4"
+ "\xd5\xdb\x0e\x4d\x44\xc1\x81\x8b"
+ "\xfd\xbd\xc3\x2b\xba\x68\xfe\x8d"
+ "\x49\x5a\x3c\x8a\xa3\x01\xae\x25"
+ "\x42\xab\xd2\x87\x1b\x35\xd6\xd2"
+ "\xd7\x70\x1c\x1f\x72\xd1\xe1\x39"
+ "\x1c\x58\xa2\xb4\xd0\x78\x55\x72"
+ "\x76\x59\xea\xd9\xd7\x6e\x63\x8b"
+ "\xcc\x9b\xa7\x74\x89\xfc\xa3\x68"
+ "\x86\x28\xd1\xbb\x54\x8d\x66\xad"
+ "\x2a\x92\xf9\x4e\x04\x3d\xae\xfd"
+ "\x1b\x2b\x7f\xc3\x2f\x1a\x78\x0a"
+ "\x5c\xc6\x84\xfe\x7c\xcb\x26\xfd"
+ "\xd9\x51\x0f\xd7\x94\x2f\xc5\xa7",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec serpent_lrw_dec_tv_template[] = {
+ /* Generated from AES-LRW test vectors */
+ /* same as enc vectors with input and result reversed */
+ {
+ .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d"
+ "\x4c\x26\x84\x14\xb5\x68\x01\x85"
+ "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03"
+ "\xee\x5a\x83\x0c\xcc\x09\x4c\x87",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x6f\xbf\xd4\xa4\x5d\x71\x16\x79"
+ "\x63\x9c\xa6\x8e\x40\xbe\x0d\x8a",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c"
+ "\xd7\x79\xe8\x0f\x54\x88\x79\x44"
+ "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea"
+ "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x02",
+ .input = "\xfd\xb2\x66\x98\x80\x96\x55\xad"
+ "\x08\x94\x54\x9c\x21\x7c\x69\xe3",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50"
+ "\x30\xfe\x69\xe2\x37\x7f\x98\x47"
+ "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6"
+ "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x14\x5e\x3d\x70\xc0\x6e\x9c\x34"
+ "\x5b\x5e\xcf\x0f\xe4\x8c\x21\x5c",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15"
+ "\x25\x83\xf7\x3c\x1f\x01\x28\x74"
+ "\xca\xc6\xbc\x35\x4d\x4a\x65\x54"
+ "\x90\xae\x61\xcf\x7b\xae\xbd\xcc"
+ "\xad\xe4\x94\xc5\x4a\x29\xae\x70",
+ .klen = 40,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x25\x39\xaa\xa5\xf0\x65\xc8\xdc"
+ "\x5d\x45\x95\x30\x8f\xff\x2f\x1b",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff"
+ "\xf8\x86\xce\xac\x93\xc5\xad\xc6"
+ "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd"
+ "\x52\x13\xb2\xb7\xf0\xff\x11\xd8"
+ "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f",
+ .klen = 40,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x0c\x20\x20\x63\xd6\x8b\xfc\x8f"
+ "\xc0\xe2\x17\xbb\xd2\x59\x6f\x26",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\xc1\x35\x2e\x53\xf0\x96\x4d\x9c"
+ "\x2e\x18\xe6\x99\xcd\xd3\x15\x68",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d"
+ "\xd4\x70\x98\x0b\xc7\x95\x84\xc8"
+ "\xb2\xfb\x64\xce\x60\x97\x87\x8d"
+ "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7"
+ "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4"
+ "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x86\x0a\xc6\xa9\x1a\x9f\xe7\xe6"
+ "\x64\x3b\x33\xd6\xd5\x84\xd6\xdf",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\xe3\x5a\x38\x0f\x4d\x92\x3a\x74"
+ "\x15\xb1\x50\x8c\x9a\xd8\x99\x1d"
+ "\x82\xec\xf1\x5f\x03\x6d\x02\x58"
+ "\x90\x67\xfc\xdd\x8d\xe1\x38\x08"
+ "\x7b\xc9\x9b\x4b\x04\x09\x50\x15"
+ "\xce\xab\xda\x33\x30\x20\x12\xfa"
+ "\x83\xc4\xa6\x9a\x2e\x7d\x90\xd9"
+ "\xa6\xa6\x67\x43\xb4\xa7\xa8\x5c"
+ "\xbb\x6a\x49\x2b\x8b\xf8\xd0\x22"
+ "\xe5\x9e\xba\xe8\x8c\x67\xb8\x5b"
+ "\x60\xbc\xf5\xa4\x95\x4e\x66\xe5"
+ "\x6d\x8e\xa9\xf6\x65\x2e\x04\xf5"
+ "\xba\xb5\xdb\x88\xc2\xf6\x7a\x4b"
+ "\x89\x58\x7c\x9a\xae\x26\xe8\xb7"
+ "\xb7\x28\xcc\xd6\xcc\xa5\x98\x4d"
+ "\xb9\x91\xcb\xb4\xe4\x8b\x96\x47"
+ "\x5f\x03\x8b\xdd\x94\xd1\xee\x12"
+ "\xa7\x83\x80\xf2\xc1\x15\x74\x4f"
+ "\x49\xf9\xb0\x7e\x6f\xdc\x73\x2f"
+ "\xe2\xcf\xe0\x1b\x34\xa5\xa0\x52"
+ "\xfb\x3c\x5d\x85\x91\xe6\x6d\x98"
+ "\x04\xd6\xdd\x4c\x00\x64\xd9\x54"
+ "\x5c\x3c\x08\x1d\x4c\x06\x9f\xb8"
+ "\x1c\x4d\x8d\xdc\xa4\x3c\xb9\x3b"
+ "\x9e\x85\xce\xc3\xa8\x4a\x0c\xd9"
+ "\x04\xc3\x6f\x17\x66\xa9\x1f\x59"
+ "\xd9\xe2\x19\x36\xa3\x88\xb8\x0b"
+ "\x0f\x4a\x4d\xf8\xc8\x6f\xd5\x43"
+ "\xeb\xa0\xab\x1f\x61\xc0\x06\xeb"
+ "\x93\xb7\xb8\x6f\x0d\xbd\x07\x49"
+ "\xb3\xac\x5d\xcf\x31\xa0\x27\x26"
+ "\x21\xbe\x94\x2e\x19\xea\xf4\xee"
+ "\xb5\x13\x89\xf7\x94\x0b\xef\x59"
+ "\x44\xc5\x78\x8b\x3c\x3b\x71\x20"
+ "\xf9\x35\x0c\x70\x74\xdc\x5b\xc2"
+ "\xb4\x11\x0e\x2c\x61\xa1\x52\x46"
+ "\x18\x11\x16\xc6\x86\x44\xa7\xaf"
+ "\xd5\x0c\x7d\xa6\x9e\x25\x2d\x1b"
+ "\x9a\x8f\x0f\xf8\x6a\x61\xa0\xea"
+ "\x3f\x0e\x90\xd6\x8f\x83\x30\x64"
+ "\xb5\x51\x2d\x08\x3c\xcd\x99\x36"
+ "\x96\xd4\xb1\xb5\x48\x30\xca\x48"
+ "\xf7\x11\xa8\xf5\x97\x8a\x6a\x6d"
+ "\x12\x33\x2f\xc0\xe8\xda\xec\x8a"
+ "\xe1\x88\x72\x63\xde\x20\xa3\xe1"
+ "\x8e\xac\x84\x37\x35\xf5\xf7\x3f"
+ "\x00\x02\x0e\xe4\xc1\x53\x68\x3f"
+ "\xaa\xd5\xac\x52\x3d\x20\x2f\x4d"
+ "\x7c\x83\xd0\xbd\xaa\x97\x35\x36"
+ "\x98\x88\x59\x5d\xe7\x24\xe3\x90"
+ "\x9d\x30\x47\xa7\xc3\x60\x35\xf4"
+ "\xd5\xdb\x0e\x4d\x44\xc1\x81\x8b"
+ "\xfd\xbd\xc3\x2b\xba\x68\xfe\x8d"
+ "\x49\x5a\x3c\x8a\xa3\x01\xae\x25"
+ "\x42\xab\xd2\x87\x1b\x35\xd6\xd2"
+ "\xd7\x70\x1c\x1f\x72\xd1\xe1\x39"
+ "\x1c\x58\xa2\xb4\xd0\x78\x55\x72"
+ "\x76\x59\xea\xd9\xd7\x6e\x63\x8b"
+ "\xcc\x9b\xa7\x74\x89\xfc\xa3\x68"
+ "\x86\x28\xd1\xbb\x54\x8d\x66\xad"
+ "\x2a\x92\xf9\x4e\x04\x3d\xae\xfd"
+ "\x1b\x2b\x7f\xc3\x2f\x1a\x78\x0a"
+ "\x5c\xc6\x84\xfe\x7c\xcb\x26\xfd"
+ "\xd9\x51\x0f\xd7\x94\x2f\xc5\xa7",
+ .ilen = 512,
+ .result = "\x05\x11\xb7\x18\xab\xc6\x2d\xac"
+ "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c"
+ "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8"
+ "\x50\x38\x1f\x71\x49\xb6\x57\xd6"
+ "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90"
+ "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6"
+ "\xad\x1e\x9e\x20\x5f\x38\xbe\x04"
+ "\xda\x10\x8e\xed\xa2\xa4\x87\xab"
+ "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c"
+ "\xc9\xac\x42\x31\x95\x7c\xc9\x04"
+ "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6"
+ "\x15\xd7\x3f\x4f\x2f\x66\x69\x03"
+ "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65"
+ "\x4c\x96\x12\xed\x7c\x92\x03\x01"
+ "\x6f\xbc\x35\x93\xac\xf1\x27\xf1"
+ "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50"
+ "\x89\xa4\x8e\x66\x44\x85\xcc\xfd"
+ "\x33\x14\x70\xe3\x96\xb2\xc3\xd3"
+ "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5"
+ "\x2d\x64\x75\xdd\xb4\x54\xe6\x74"
+ "\x8c\xd3\x9d\x9e\x86\xab\x51\x53"
+ "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40"
+ "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5"
+ "\x76\x12\x73\x44\x1a\x56\xd7\x72"
+ "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda"
+ "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd"
+ "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60"
+ "\x1a\xe2\x70\x85\x58\xc2\x1b\x09"
+ "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9"
+ "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8"
+ "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8"
+ "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10"
+ "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1"
+ "\x90\x3e\x76\x4a\x74\xa4\x21\x2c"
+ "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e"
+ "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f"
+ "\x8d\x23\x31\x74\x84\xeb\x88\x6e"
+ "\xcc\xb9\xbc\x22\x83\x19\x07\x22"
+ "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78"
+ "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5"
+ "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41"
+ "\x3c\xce\x8f\x42\x60\x71\xa7\x75"
+ "\x08\x40\x65\x8a\x82\xbf\xf5\x43"
+ "\x71\x96\xa9\x4d\x44\x8a\x20\xbe"
+ "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65"
+ "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9"
+ "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4"
+ "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a"
+ "\x62\x73\x65\xfd\x46\x63\x25\x3d"
+ "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf"
+ "\x24\xf3\xb4\xac\x64\xba\xdf\x4b"
+ "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7"
+ "\xc5\x68\x77\x84\x32\x2b\xcc\x85"
+ "\x74\x96\xf0\x12\x77\x61\xb9\xeb"
+ "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8"
+ "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24"
+ "\xda\x39\x87\x45\xc0\x2b\xbb\x01"
+ "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce"
+ "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6"
+ "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32"
+ "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45"
+ "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6"
+ "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4"
+ "\x21\xc4\xc2\x75\x67\x89\x37\x0a",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec serpent_xts_enc_tv_template[] = {
+ /* Generated from AES-XTS test vectors */
+ {
+ .key = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .ilen = 32,
+ .result = "\xe1\x08\xb8\x1d\x2c\xf5\x33\x64"
+ "\xc8\x12\x04\xc7\xb3\x70\xe8\xc4"
+ "\x6a\x31\xc5\xf3\x00\xca\xb9\x16"
+ "\xde\xe2\x77\x66\xf7\xfe\x62\x08",
+ .rlen = 32,
+ }, {
+ .key = "\x11\x11\x11\x11\x11\x11\x11\x11"
+ "\x11\x11\x11\x11\x11\x11\x11\x11"
+ "\x22\x22\x22\x22\x22\x22\x22\x22"
+ "\x22\x22\x22\x22\x22\x22\x22\x22",
+ .klen = 32,
+ .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44",
+ .ilen = 32,
+ .result = "\x1a\x0a\x09\x5f\xcd\x07\x07\x98"
+ "\x41\x86\x12\xaf\xb3\xd7\x68\x13"
+ "\xed\x81\xcd\x06\x87\x43\x1a\xbb"
+ "\x13\x3d\xd6\x1e\x2b\xe1\x77\xbe",
+ .rlen = 32,
+ }, {
+ .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8"
+ "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0"
+ "\x22\x22\x22\x22\x22\x22\x22\x22"
+ "\x22\x22\x22\x22\x22\x22\x22\x22",
+ .klen = 32,
+ .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44",
+ .ilen = 32,
+ .result = "\xf9\x9b\x28\xb8\x5c\xaf\x8c\x61"
+ "\xb6\x1c\x81\x8f\x2c\x87\x60\x89"
+ "\x0d\x8d\x7a\xe8\x60\x48\xcc\x86"
+ "\xc1\x68\x45\xaa\x00\xe9\x24\xc5",
+ .rlen = 32,
+ }, {
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .ilen = 512,
+ .result = "\xfe\x47\x4a\xc8\x60\x7e\xb4\x8b"
+ "\x0d\x10\xf4\xb0\x0d\xba\xf8\x53"
+ "\x65\x6e\x38\x4b\xdb\xaa\xb1\x9e"
+ "\x28\xca\xb0\x22\xb3\x85\x75\xf4"
+ "\x00\x5c\x75\x14\x06\xd6\x25\x82"
+ "\xe6\xcb\x08\xf7\x29\x90\x23\x8e"
+ "\xa4\x68\x57\xe4\xf0\xd8\x32\xf3"
+ "\x80\x51\x67\xb5\x0b\x85\x69\xe8"
+ "\x19\xfe\xc4\xc7\x3e\xea\x90\xd3"
+ "\x8f\xa3\xf2\x0a\xac\x17\x4b\xa0"
+ "\x63\x5a\x16\x0f\xf0\xce\x66\x1f"
+ "\x2c\x21\x07\xf1\xa4\x03\xa3\x44"
+ "\x41\x61\x87\x5d\x6b\xb3\xef\xd4"
+ "\xfc\xaa\x32\x7e\x55\x58\x04\x41"
+ "\xc9\x07\x33\xc6\xa2\x68\xd6\x5a"
+ "\x55\x79\x4b\x6f\xcf\x89\xb9\x19"
+ "\xe5\x54\x13\x15\xb2\x1a\xfa\x15"
+ "\xc2\xf0\x06\x59\xfa\xa0\x25\x05"
+ "\x58\xfa\x43\x91\x16\x85\x40\xbb"
+ "\x0d\x34\x4d\xc5\x1e\x20\xd5\x08"
+ "\xcd\x22\x22\x41\x11\x9f\x6c\x7c"
+ "\x8d\x57\xc9\xba\x57\xe8\x2c\xf7"
+ "\xa0\x42\xa8\xde\xfc\xa3\xca\x98"
+ "\x4b\x43\xb1\xce\x4b\xbf\x01\x67"
+ "\x6e\x29\x60\xbd\x10\x14\x84\x82"
+ "\x83\x82\x0c\x63\x73\x92\x02\x7c"
+ "\x55\x37\x20\x80\x17\x51\xc8\xbc"
+ "\x46\x02\xcb\x38\x07\x6d\xe2\x85"
+ "\xaa\x29\xaf\x24\x58\x0d\xf0\x75"
+ "\x08\x0a\xa5\x34\x25\x16\xf3\x74"
+ "\xa7\x0b\x97\xbe\xc1\xa9\xdc\x29"
+ "\x1a\x0a\x56\xc1\x1a\x91\x97\x8c"
+ "\x0b\xc7\x16\xed\x5a\x22\xa6\x2e"
+ "\x8c\x2b\x4f\x54\x76\x47\x53\x8e"
+ "\xe8\x00\xec\x92\xb9\x55\xe6\xa2"
+ "\xf3\xe2\x4f\x6a\x66\x60\xd0\x87"
+ "\xe6\xd1\xcc\xe3\x6a\xc5\x2d\x21"
+ "\xcc\x9d\x6a\xb6\x75\xaa\xe2\x19"
+ "\x21\x9f\xa1\x5e\x4c\xfd\x72\xf9"
+ "\x94\x4e\x63\xc7\xae\xfc\xed\x47"
+ "\xe2\xfe\x7a\x63\x77\xfe\x97\x82"
+ "\xb1\x10\x6e\x36\x1d\xe1\xc4\x80"
+ "\xec\x69\x41\xec\xa7\x8a\xe0\x2f"
+ "\xe3\x49\x26\xa2\x41\xb2\x08\x0f"
+ "\x28\xb4\xa7\x39\xa1\x99\x2d\x1e"
+ "\x43\x42\x35\xd0\xcf\xec\x77\x67"
+ "\xb2\x3b\x9e\x1c\x35\xde\x4f\x5e"
+ "\x73\x3f\x5d\x6f\x07\x4b\x2e\x50"
+ "\xab\x6c\x6b\xff\xea\x00\x67\xaa"
+ "\x0e\x82\x32\xdd\x3d\xb5\xe5\x76"
+ "\x2b\x77\x3f\xbe\x12\x75\xfb\x92"
+ "\xc6\x89\x67\x4d\xca\xf7\xd4\x50"
+ "\xc0\x74\x47\xcc\xd9\x0a\xd4\xc6"
+ "\x3b\x17\x2e\xe3\x35\xbb\x53\xb5"
+ "\x86\xad\x51\xcc\xd5\x96\xb8\xdc"
+ "\x03\x57\xe6\x98\x52\x2f\x61\x62"
+ "\xc4\x5c\x9c\x36\x71\x07\xfb\x94"
+ "\xe3\x02\xc4\x2b\x08\x75\xc7\x35"
+ "\xfb\x2e\x88\x7b\xbb\x67\x00\xe1"
+ "\xc9\xdd\x99\xb2\x13\x53\x1a\x4e"
+ "\x76\x87\x19\x04\x1a\x2f\x38\x3e"
+ "\xef\x91\x64\x1d\x18\x07\x4e\x31"
+ "\x88\x21\x7c\xb0\xa5\x12\x4c\x3c"
+ "\xb0\x20\xbd\xda\xdf\xf9\x7c\xdd",
+ .rlen = 512,
+ }, {
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x62\x49\x77\x57\x24\x70\x93\x69"
+ "\x99\x59\x57\x49\x66\x96\x76\x27"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95"
+ "\x02\x88\x41\x97\x16\x93\x99\x37"
+ "\x51\x05\x82\x09\x74\x94\x45\x92",
+ .klen = 64,
+ .iv = "\xff\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .ilen = 512,
+ .result = "\x2b\xc9\xb4\x6b\x10\x94\xa9\x32"
+ "\xaa\xb0\x20\xc6\x44\x3d\x74\x1f"
+ "\x75\x01\xa7\xf6\xf5\xf7\x62\x1b"
+ "\x80\x1b\x82\xcb\x01\x59\x91\x7f"
+ "\x80\x3a\x98\xf0\xd2\xca\xc4\xc3"
+ "\x34\xfd\xe6\x11\xf9\x33\x45\x12"
+ "\x48\xc5\x8c\x25\xf1\xc5\xc5\x23"
+ "\xd3\x44\xb4\x73\xd5\x04\xc0\xb7"
+ "\xca\x2f\xf5\xcd\xc5\xb4\xdd\xb0"
+ "\xf4\x60\xe8\xfb\xc6\x9c\xc5\x78"
+ "\xcd\xec\x7d\xdc\x19\x9c\x72\x64"
+ "\x63\x0b\x38\x2e\x76\xdd\x2d\x36"
+ "\x49\xb0\x1d\xea\x78\x9e\x00\xca"
+ "\x20\xcc\x1b\x1e\x98\x74\xab\xed"
+ "\x79\xf7\xd0\x6c\xd8\x93\x80\x29"
+ "\xac\xa5\x5e\x34\xa9\xab\xa0\x55"
+ "\x9a\xea\xaa\x95\x4d\x7b\xfe\x46"
+ "\x26\x8a\xfd\x88\xa2\xa8\xa6\xae"
+ "\x25\x42\x17\xbf\x76\x8f\x1c\x3d"
+ "\xec\x9a\xda\x64\x96\xb5\x61\xff"
+ "\x99\xeb\x12\x96\x85\x82\x9d\xd5"
+ "\x81\x85\x14\xa8\x59\xac\x8c\x94"
+ "\xbb\x3b\x85\x2b\xdf\xb3\x0c\xba"
+ "\x82\xc6\x4d\xca\x86\xea\x53\x28"
+ "\x4c\xe0\x4e\x31\xe3\x73\x2f\x79"
+ "\x9d\x42\xe1\x03\xe3\x8b\xc4\xff"
+ "\x05\xca\x81\x7b\xda\xa2\xde\x63"
+ "\x3a\x10\xbe\xc2\xac\x32\xc4\x05"
+ "\x47\x7e\xef\x67\xe2\x5f\x5b\xae"
+ "\xed\xf1\x70\x34\x16\x9a\x07\x7b"
+ "\xf2\x25\x2b\xb0\xf8\x3c\x15\x9a"
+ "\xa6\x59\x55\x5f\xc1\xf4\x1e\xcd"
+ "\x93\x1f\x06\xba\xd4\x9a\x22\x69"
+ "\xfa\x8e\x95\x0d\xf3\x23\x59\x2c"
+ "\xfe\x00\xba\xf0\x0e\xbc\x6d\xd6"
+ "\x62\xf0\x7a\x0e\x83\x3e\xdb\x32"
+ "\xfd\x43\x7d\xda\x42\x51\x87\x43"
+ "\x9d\xf9\xef\xf4\x30\x97\xf8\x09"
+ "\x88\xfc\x3f\x93\x70\xc1\x4a\xec"
+ "\x27\x5f\x11\xac\x71\xc7\x48\x46"
+ "\x2f\xf9\xdf\x8d\x9f\xf7\x2e\x56"
+ "\x0d\x4e\xb0\x32\x76\xce\x86\x81"
+ "\xcd\xdf\xe4\x00\xbf\xfd\x5f\x24"
+ "\xaf\xf7\x9a\xde\xff\x18\xac\x14"
+ "\x90\xc5\x01\x39\x34\x0f\x24\xf3"
+ "\x13\x2f\x5e\x4f\x30\x9a\x36\x40"
+ "\xec\xea\xbc\xcd\x9e\x0e\x5b\x23"
+ "\x50\x88\x97\x40\x69\xb1\x37\xf5"
+ "\xc3\x15\xf9\x3f\xb7\x79\x64\xe8"
+ "\x7b\x10\x20\xb9\x2b\x46\x83\x5b"
+ "\xd8\x39\xfc\xe4\xfa\x88\x52\xf2"
+ "\x72\xb0\x97\x4e\x89\xb3\x48\x00"
+ "\xc1\x16\x73\x50\x77\xba\xa6\x65"
+ "\x20\x2d\xb0\x02\x27\x89\xda\x99"
+ "\x45\xfb\xe9\xd3\x1d\x39\x2f\xd6"
+ "\x2a\xda\x09\x12\x11\xaf\xe6\x57"
+ "\x01\x04\x8a\xff\x86\x8b\xac\xf8"
+ "\xee\xe4\x1c\x98\x5b\xcf\x6b\x76"
+ "\xa3\x0e\x33\x74\x40\x18\x39\x72"
+ "\x66\x50\x31\xfd\x70\xdf\xe8\x51"
+ "\x96\x21\x36\xb2\x9b\xfa\x85\xd1"
+ "\x30\x05\xc8\x92\x98\x80\xff\x7a"
+ "\xaf\x43\x0b\xc5\x20\x41\x92\x20"
+ "\xd4\xa0\x91\x98\x11\x5f\x4d\xb1",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec serpent_xts_dec_tv_template[] = {
+ /* Generated from AES-XTS test vectors */
+ /* same as enc vectors with input and result reversed */
+ {
+ .key = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\xe1\x08\xb8\x1d\x2c\xf5\x33\x64"
+ "\xc8\x12\x04\xc7\xb3\x70\xe8\xc4"
+ "\x6a\x31\xc5\xf3\x00\xca\xb9\x16"
+ "\xde\xe2\x77\x66\xf7\xfe\x62\x08",
+ .ilen = 32,
+ .result = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .rlen = 32,
+ }, {
+ .key = "\x11\x11\x11\x11\x11\x11\x11\x11"
+ "\x11\x11\x11\x11\x11\x11\x11\x11"
+ "\x22\x22\x22\x22\x22\x22\x22\x22"
+ "\x22\x22\x22\x22\x22\x22\x22\x22",
+ .klen = 32,
+ .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x1a\x0a\x09\x5f\xcd\x07\x07\x98"
+ "\x41\x86\x12\xaf\xb3\xd7\x68\x13"
+ "\xed\x81\xcd\x06\x87\x43\x1a\xbb"
+ "\x13\x3d\xd6\x1e\x2b\xe1\x77\xbe",
+ .ilen = 32,
+ .result = "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44",
+ .rlen = 32,
+ }, {
+ .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8"
+ "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0"
+ "\x22\x22\x22\x22\x22\x22\x22\x22"
+ "\x22\x22\x22\x22\x22\x22\x22\x22",
+ .klen = 32,
+ .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\xf9\x9b\x28\xb8\x5c\xaf\x8c\x61"
+ "\xb6\x1c\x81\x8f\x2c\x87\x60\x89"
+ "\x0d\x8d\x7a\xe8\x60\x48\xcc\x86"
+ "\xc1\x68\x45\xaa\x00\xe9\x24\xc5",
+ .ilen = 32,
+ .result = "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44",
+ .rlen = 32,
+ }, {
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\xfe\x47\x4a\xc8\x60\x7e\xb4\x8b"
+ "\x0d\x10\xf4\xb0\x0d\xba\xf8\x53"
+ "\x65\x6e\x38\x4b\xdb\xaa\xb1\x9e"
+ "\x28\xca\xb0\x22\xb3\x85\x75\xf4"
+ "\x00\x5c\x75\x14\x06\xd6\x25\x82"
+ "\xe6\xcb\x08\xf7\x29\x90\x23\x8e"
+ "\xa4\x68\x57\xe4\xf0\xd8\x32\xf3"
+ "\x80\x51\x67\xb5\x0b\x85\x69\xe8"
+ "\x19\xfe\xc4\xc7\x3e\xea\x90\xd3"
+ "\x8f\xa3\xf2\x0a\xac\x17\x4b\xa0"
+ "\x63\x5a\x16\x0f\xf0\xce\x66\x1f"
+ "\x2c\x21\x07\xf1\xa4\x03\xa3\x44"
+ "\x41\x61\x87\x5d\x6b\xb3\xef\xd4"
+ "\xfc\xaa\x32\x7e\x55\x58\x04\x41"
+ "\xc9\x07\x33\xc6\xa2\x68\xd6\x5a"
+ "\x55\x79\x4b\x6f\xcf\x89\xb9\x19"
+ "\xe5\x54\x13\x15\xb2\x1a\xfa\x15"
+ "\xc2\xf0\x06\x59\xfa\xa0\x25\x05"
+ "\x58\xfa\x43\x91\x16\x85\x40\xbb"
+ "\x0d\x34\x4d\xc5\x1e\x20\xd5\x08"
+ "\xcd\x22\x22\x41\x11\x9f\x6c\x7c"
+ "\x8d\x57\xc9\xba\x57\xe8\x2c\xf7"
+ "\xa0\x42\xa8\xde\xfc\xa3\xca\x98"
+ "\x4b\x43\xb1\xce\x4b\xbf\x01\x67"
+ "\x6e\x29\x60\xbd\x10\x14\x84\x82"
+ "\x83\x82\x0c\x63\x73\x92\x02\x7c"
+ "\x55\x37\x20\x80\x17\x51\xc8\xbc"
+ "\x46\x02\xcb\x38\x07\x6d\xe2\x85"
+ "\xaa\x29\xaf\x24\x58\x0d\xf0\x75"
+ "\x08\x0a\xa5\x34\x25\x16\xf3\x74"
+ "\xa7\x0b\x97\xbe\xc1\xa9\xdc\x29"
+ "\x1a\x0a\x56\xc1\x1a\x91\x97\x8c"
+ "\x0b\xc7\x16\xed\x5a\x22\xa6\x2e"
+ "\x8c\x2b\x4f\x54\x76\x47\x53\x8e"
+ "\xe8\x00\xec\x92\xb9\x55\xe6\xa2"
+ "\xf3\xe2\x4f\x6a\x66\x60\xd0\x87"
+ "\xe6\xd1\xcc\xe3\x6a\xc5\x2d\x21"
+ "\xcc\x9d\x6a\xb6\x75\xaa\xe2\x19"
+ "\x21\x9f\xa1\x5e\x4c\xfd\x72\xf9"
+ "\x94\x4e\x63\xc7\xae\xfc\xed\x47"
+ "\xe2\xfe\x7a\x63\x77\xfe\x97\x82"
+ "\xb1\x10\x6e\x36\x1d\xe1\xc4\x80"
+ "\xec\x69\x41\xec\xa7\x8a\xe0\x2f"
+ "\xe3\x49\x26\xa2\x41\xb2\x08\x0f"
+ "\x28\xb4\xa7\x39\xa1\x99\x2d\x1e"
+ "\x43\x42\x35\xd0\xcf\xec\x77\x67"
+ "\xb2\x3b\x9e\x1c\x35\xde\x4f\x5e"
+ "\x73\x3f\x5d\x6f\x07\x4b\x2e\x50"
+ "\xab\x6c\x6b\xff\xea\x00\x67\xaa"
+ "\x0e\x82\x32\xdd\x3d\xb5\xe5\x76"
+ "\x2b\x77\x3f\xbe\x12\x75\xfb\x92"
+ "\xc6\x89\x67\x4d\xca\xf7\xd4\x50"
+ "\xc0\x74\x47\xcc\xd9\x0a\xd4\xc6"
+ "\x3b\x17\x2e\xe3\x35\xbb\x53\xb5"
+ "\x86\xad\x51\xcc\xd5\x96\xb8\xdc"
+ "\x03\x57\xe6\x98\x52\x2f\x61\x62"
+ "\xc4\x5c\x9c\x36\x71\x07\xfb\x94"
+ "\xe3\x02\xc4\x2b\x08\x75\xc7\x35"
+ "\xfb\x2e\x88\x7b\xbb\x67\x00\xe1"
+ "\xc9\xdd\x99\xb2\x13\x53\x1a\x4e"
+ "\x76\x87\x19\x04\x1a\x2f\x38\x3e"
+ "\xef\x91\x64\x1d\x18\x07\x4e\x31"
+ "\x88\x21\x7c\xb0\xa5\x12\x4c\x3c"
+ "\xb0\x20\xbd\xda\xdf\xf9\x7c\xdd",
+ .ilen = 512,
+ .result = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .rlen = 512,
+ }, {
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x62\x49\x77\x57\x24\x70\x93\x69"
+ "\x99\x59\x57\x49\x66\x96\x76\x27"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95"
+ "\x02\x88\x41\x97\x16\x93\x99\x37"
+ "\x51\x05\x82\x09\x74\x94\x45\x92",
+ .klen = 64,
+ .iv = "\xff\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x2b\xc9\xb4\x6b\x10\x94\xa9\x32"
+ "\xaa\xb0\x20\xc6\x44\x3d\x74\x1f"
+ "\x75\x01\xa7\xf6\xf5\xf7\x62\x1b"
+ "\x80\x1b\x82\xcb\x01\x59\x91\x7f"
+ "\x80\x3a\x98\xf0\xd2\xca\xc4\xc3"
+ "\x34\xfd\xe6\x11\xf9\x33\x45\x12"
+ "\x48\xc5\x8c\x25\xf1\xc5\xc5\x23"
+ "\xd3\x44\xb4\x73\xd5\x04\xc0\xb7"
+ "\xca\x2f\xf5\xcd\xc5\xb4\xdd\xb0"
+ "\xf4\x60\xe8\xfb\xc6\x9c\xc5\x78"
+ "\xcd\xec\x7d\xdc\x19\x9c\x72\x64"
+ "\x63\x0b\x38\x2e\x76\xdd\x2d\x36"
+ "\x49\xb0\x1d\xea\x78\x9e\x00\xca"
+ "\x20\xcc\x1b\x1e\x98\x74\xab\xed"
+ "\x79\xf7\xd0\x6c\xd8\x93\x80\x29"
+ "\xac\xa5\x5e\x34\xa9\xab\xa0\x55"
+ "\x9a\xea\xaa\x95\x4d\x7b\xfe\x46"
+ "\x26\x8a\xfd\x88\xa2\xa8\xa6\xae"
+ "\x25\x42\x17\xbf\x76\x8f\x1c\x3d"
+ "\xec\x9a\xda\x64\x96\xb5\x61\xff"
+ "\x99\xeb\x12\x96\x85\x82\x9d\xd5"
+ "\x81\x85\x14\xa8\x59\xac\x8c\x94"
+ "\xbb\x3b\x85\x2b\xdf\xb3\x0c\xba"
+ "\x82\xc6\x4d\xca\x86\xea\x53\x28"
+ "\x4c\xe0\x4e\x31\xe3\x73\x2f\x79"
+ "\x9d\x42\xe1\x03\xe3\x8b\xc4\xff"
+ "\x05\xca\x81\x7b\xda\xa2\xde\x63"
+ "\x3a\x10\xbe\xc2\xac\x32\xc4\x05"
+ "\x47\x7e\xef\x67\xe2\x5f\x5b\xae"
+ "\xed\xf1\x70\x34\x16\x9a\x07\x7b"
+ "\xf2\x25\x2b\xb0\xf8\x3c\x15\x9a"
+ "\xa6\x59\x55\x5f\xc1\xf4\x1e\xcd"
+ "\x93\x1f\x06\xba\xd4\x9a\x22\x69"
+ "\xfa\x8e\x95\x0d\xf3\x23\x59\x2c"
+ "\xfe\x00\xba\xf0\x0e\xbc\x6d\xd6"
+ "\x62\xf0\x7a\x0e\x83\x3e\xdb\x32"
+ "\xfd\x43\x7d\xda\x42\x51\x87\x43"
+ "\x9d\xf9\xef\xf4\x30\x97\xf8\x09"
+ "\x88\xfc\x3f\x93\x70\xc1\x4a\xec"
+ "\x27\x5f\x11\xac\x71\xc7\x48\x46"
+ "\x2f\xf9\xdf\x8d\x9f\xf7\x2e\x56"
+ "\x0d\x4e\xb0\x32\x76\xce\x86\x81"
+ "\xcd\xdf\xe4\x00\xbf\xfd\x5f\x24"
+ "\xaf\xf7\x9a\xde\xff\x18\xac\x14"
+ "\x90\xc5\x01\x39\x34\x0f\x24\xf3"
+ "\x13\x2f\x5e\x4f\x30\x9a\x36\x40"
+ "\xec\xea\xbc\xcd\x9e\x0e\x5b\x23"
+ "\x50\x88\x97\x40\x69\xb1\x37\xf5"
+ "\xc3\x15\xf9\x3f\xb7\x79\x64\xe8"
+ "\x7b\x10\x20\xb9\x2b\x46\x83\x5b"
+ "\xd8\x39\xfc\xe4\xfa\x88\x52\xf2"
+ "\x72\xb0\x97\x4e\x89\xb3\x48\x00"
+ "\xc1\x16\x73\x50\x77\xba\xa6\x65"
+ "\x20\x2d\xb0\x02\x27\x89\xda\x99"
+ "\x45\xfb\xe9\xd3\x1d\x39\x2f\xd6"
+ "\x2a\xda\x09\x12\x11\xaf\xe6\x57"
+ "\x01\x04\x8a\xff\x86\x8b\xac\xf8"
+ "\xee\xe4\x1c\x98\x5b\xcf\x6b\x76"
+ "\xa3\x0e\x33\x74\x40\x18\x39\x72"
+ "\x66\x50\x31\xfd\x70\xdf\xe8\x51"
+ "\x96\x21\x36\xb2\x9b\xfa\x85\xd1"
+ "\x30\x05\xc8\x92\x98\x80\xff\x7a"
+ "\xaf\x43\x0b\xc5\x20\x41\x92\x20"
+ "\xd4\xa0\x91\x98\x11\x5f\x4d\xb1",
+ .ilen = 512,
+ .result = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
/* Cast6 test vectors from RFC 2612 */
-#define CAST6_ENC_TEST_VECTORS 3
-#define CAST6_DEC_TEST_VECTORS 3
+#define CAST6_ENC_TEST_VECTORS 4
+#define CAST6_DEC_TEST_VECTORS 4
+#define CAST6_CBC_ENC_TEST_VECTORS 1
+#define CAST6_CBC_DEC_TEST_VECTORS 1
+#define CAST6_CTR_ENC_TEST_VECTORS 2
+#define CAST6_CTR_DEC_TEST_VECTORS 2
+#define CAST6_LRW_ENC_TEST_VECTORS 1
+#define CAST6_LRW_DEC_TEST_VECTORS 1
+#define CAST6_XTS_ENC_TEST_VECTORS 1
+#define CAST6_XTS_DEC_TEST_VECTORS 1
static struct cipher_testvec cast6_enc_tv_template[] = {
{
@@ -2787,6 +12047,143 @@ static struct cipher_testvec cast6_enc_tv_template[] = {
.result = "\x4f\x6a\x20\x38\x28\x68\x97\xb9"
"\xc9\x87\x01\x36\x55\x33\x17\xfa",
.rlen = 16,
+ }, { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\xC3\x70\x22\x32\xF5\x80\xCB\x54"
+ "\xFC\x30\xE0\xF6\xEB\x39\x57\xA6"
+ "\xB6\xB9\xC5\xA4\x91\x55\x14\x97"
+ "\xC1\x20\xFF\x6C\x5C\xF0\x67\xEA"
+ "\x2F\xED\xD8\xC9\xFB\x38\x3F\xFE"
+ "\x93\xBE\xDC\x00\xD3\x7F\xAD\x4C"
+ "\x5A\x08\x92\xD1\x47\x0C\xFA\x6C"
+ "\xD0\x6A\x99\x10\x72\xF8\x47\x62"
+ "\x81\x42\xF8\xD8\xF5\xBB\x94\x08"
+ "\xAA\x97\xA2\x8B\x69\xB3\xD2\x7E"
+ "\xBC\xB5\x00\x0C\xE5\x44\x4B\x58"
+ "\xE8\x63\xDC\xB3\xC4\xE5\x23\x12"
+ "\x5A\x72\x85\x47\x8B\xEC\x9F\x26"
+ "\x84\xB6\xED\x10\x33\x63\x9B\x5F"
+ "\x4D\x53\xEE\x94\x45\x8B\x60\x58"
+ "\x86\x20\xF9\x1E\x82\x08\x3E\x58"
+ "\x60\x1B\x34\x19\x02\xBE\x4E\x09"
+ "\xBB\x7C\x15\xCC\x60\x27\x55\x7A"
+ "\x12\xB8\xD8\x08\x89\x3C\xA6\xF3"
+ "\xF1\xDD\xA7\x07\xA3\x12\x85\x28"
+ "\xE9\x57\xAC\x80\x0C\x5C\x0F\x3A"
+ "\x5D\xC2\x91\xC7\x90\xE4\x8C\x43"
+ "\x92\xE4\x7C\x26\x69\x4D\x83\x68"
+ "\x14\x96\x42\x47\xBD\xA9\xE4\x8A"
+ "\x33\x19\xEB\x54\x8E\x0D\x4B\x6E"
+ "\x91\x51\xB5\x36\x08\xDE\x1C\x06"
+ "\x03\xBD\xDE\x81\x26\xF7\x99\xC2"
+ "\xBA\xF7\x6D\x87\x0D\xE4\xA6\xCF"
+ "\xC1\xF5\x27\x05\xB8\x02\x57\x72"
+ "\xE6\x42\x13\x0B\xC6\x47\x05\x74"
+ "\x24\x15\xF7\x0D\xC2\x23\x9D\xB9"
+ "\x3C\x77\x18\x93\xBA\xB4\xFC\x8C"
+ "\x98\x82\x67\x67\xB4\xD7\xD3\x43"
+ "\x23\x08\x02\xB7\x9B\x99\x05\xFB"
+ "\xD3\xB5\x00\x0A\xA9\x9D\x66\xD6"
+ "\x2E\x49\x58\xD0\xA8\x57\x29\x7F"
+ "\x0A\x0E\x7D\xFC\x92\x83\xCC\x67"
+ "\xA2\xB1\x70\x3A\x8F\x87\x4A\x8D"
+ "\x17\xE2\x58\x2B\x88\x0D\x68\x62"
+ "\xBF\x35\xD1\x6F\xC0\xF0\x18\x62"
+ "\xB2\xC7\x2D\x58\xC7\x16\xDE\x08"
+ "\xEB\x84\x1D\x25\xA7\x38\x94\x06"
+ "\x93\x9D\xF8\xFE\x88\x71\xE7\x84"
+ "\x2C\xA0\x38\xA3\x1D\x48\xCF\x29"
+ "\x0B\xBC\xD8\x50\x99\x1A\x26\xFB"
+ "\x8E\x75\x3D\x73\xEB\x6A\xED\x29"
+ "\xE0\x8E\xED\xFC\xFE\x6F\xF6\xBA"
+ "\x41\xE2\x10\x4C\x01\x8B\x69\x2B"
+ "\x25\x3F\x4D\x70\x7B\x92\xD6\x3B"
+ "\xAC\xF9\x77\x18\xD9\x6A\x30\xA6"
+ "\x2E\xFA\x30\xFF\xC8\xD5\x1D\x06"
+ "\x59\x28\x1D\x86\x43\x04\x5D\x3B"
+ "\x99\x4C\x04\x5A\x21\x17\x8B\x76"
+ "\x8F\x72\xCB\xA1\x9C\x29\x4C\xC3"
+ "\x65\xA2\x58\x2A\xC5\x66\x24\xBF"
+ "\xBA\xE6\x0C\xDD\x34\x24\x74\xC8"
+ "\x84\x0A\x66\x2C\xBE\x8F\x32\xA9"
+ "\xE7\xE4\xA1\xD7\xDA\xAB\x23\x1E"
+ "\xEB\xEE\x6C\x94\x6F\x9C\x2E\xD1"
+ "\x49\x2C\xF3\xD4\x90\xCC\x93\x4C"
+ "\x84\x52\x6D\x68\xDE\xC6\x64\xB2"
+ "\x11\x74\x93\x57\xB4\x7E\xC6\x00",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -2821,6 +12218,1331 @@ static struct cipher_testvec cast6_dec_tv_template[] = {
.ilen = 16,
.result = zeroed_string,
.rlen = 16,
+ }, { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\xC3\x70\x22\x32\xF5\x80\xCB\x54"
+ "\xFC\x30\xE0\xF6\xEB\x39\x57\xA6"
+ "\xB6\xB9\xC5\xA4\x91\x55\x14\x97"
+ "\xC1\x20\xFF\x6C\x5C\xF0\x67\xEA"
+ "\x2F\xED\xD8\xC9\xFB\x38\x3F\xFE"
+ "\x93\xBE\xDC\x00\xD3\x7F\xAD\x4C"
+ "\x5A\x08\x92\xD1\x47\x0C\xFA\x6C"
+ "\xD0\x6A\x99\x10\x72\xF8\x47\x62"
+ "\x81\x42\xF8\xD8\xF5\xBB\x94\x08"
+ "\xAA\x97\xA2\x8B\x69\xB3\xD2\x7E"
+ "\xBC\xB5\x00\x0C\xE5\x44\x4B\x58"
+ "\xE8\x63\xDC\xB3\xC4\xE5\x23\x12"
+ "\x5A\x72\x85\x47\x8B\xEC\x9F\x26"
+ "\x84\xB6\xED\x10\x33\x63\x9B\x5F"
+ "\x4D\x53\xEE\x94\x45\x8B\x60\x58"
+ "\x86\x20\xF9\x1E\x82\x08\x3E\x58"
+ "\x60\x1B\x34\x19\x02\xBE\x4E\x09"
+ "\xBB\x7C\x15\xCC\x60\x27\x55\x7A"
+ "\x12\xB8\xD8\x08\x89\x3C\xA6\xF3"
+ "\xF1\xDD\xA7\x07\xA3\x12\x85\x28"
+ "\xE9\x57\xAC\x80\x0C\x5C\x0F\x3A"
+ "\x5D\xC2\x91\xC7\x90\xE4\x8C\x43"
+ "\x92\xE4\x7C\x26\x69\x4D\x83\x68"
+ "\x14\x96\x42\x47\xBD\xA9\xE4\x8A"
+ "\x33\x19\xEB\x54\x8E\x0D\x4B\x6E"
+ "\x91\x51\xB5\x36\x08\xDE\x1C\x06"
+ "\x03\xBD\xDE\x81\x26\xF7\x99\xC2"
+ "\xBA\xF7\x6D\x87\x0D\xE4\xA6\xCF"
+ "\xC1\xF5\x27\x05\xB8\x02\x57\x72"
+ "\xE6\x42\x13\x0B\xC6\x47\x05\x74"
+ "\x24\x15\xF7\x0D\xC2\x23\x9D\xB9"
+ "\x3C\x77\x18\x93\xBA\xB4\xFC\x8C"
+ "\x98\x82\x67\x67\xB4\xD7\xD3\x43"
+ "\x23\x08\x02\xB7\x9B\x99\x05\xFB"
+ "\xD3\xB5\x00\x0A\xA9\x9D\x66\xD6"
+ "\x2E\x49\x58\xD0\xA8\x57\x29\x7F"
+ "\x0A\x0E\x7D\xFC\x92\x83\xCC\x67"
+ "\xA2\xB1\x70\x3A\x8F\x87\x4A\x8D"
+ "\x17\xE2\x58\x2B\x88\x0D\x68\x62"
+ "\xBF\x35\xD1\x6F\xC0\xF0\x18\x62"
+ "\xB2\xC7\x2D\x58\xC7\x16\xDE\x08"
+ "\xEB\x84\x1D\x25\xA7\x38\x94\x06"
+ "\x93\x9D\xF8\xFE\x88\x71\xE7\x84"
+ "\x2C\xA0\x38\xA3\x1D\x48\xCF\x29"
+ "\x0B\xBC\xD8\x50\x99\x1A\x26\xFB"
+ "\x8E\x75\x3D\x73\xEB\x6A\xED\x29"
+ "\xE0\x8E\xED\xFC\xFE\x6F\xF6\xBA"
+ "\x41\xE2\x10\x4C\x01\x8B\x69\x2B"
+ "\x25\x3F\x4D\x70\x7B\x92\xD6\x3B"
+ "\xAC\xF9\x77\x18\xD9\x6A\x30\xA6"
+ "\x2E\xFA\x30\xFF\xC8\xD5\x1D\x06"
+ "\x59\x28\x1D\x86\x43\x04\x5D\x3B"
+ "\x99\x4C\x04\x5A\x21\x17\x8B\x76"
+ "\x8F\x72\xCB\xA1\x9C\x29\x4C\xC3"
+ "\x65\xA2\x58\x2A\xC5\x66\x24\xBF"
+ "\xBA\xE6\x0C\xDD\x34\x24\x74\xC8"
+ "\x84\x0A\x66\x2C\xBE\x8F\x32\xA9"
+ "\xE7\xE4\xA1\xD7\xDA\xAB\x23\x1E"
+ "\xEB\xEE\x6C\x94\x6F\x9C\x2E\xD1"
+ "\x49\x2C\xF3\xD4\x90\xCC\x93\x4C"
+ "\x84\x52\x6D\x68\xDE\xC6\x64\xB2"
+ "\x11\x74\x93\x57\xB4\x7E\xC6\x00",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec cast6_cbc_enc_tv_template[] = {
+ { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\xDF\x77\x68\x96\xC7\xBA\xF8\xE2"
+ "\x0E\x24\x99\x1A\xAA\xF3\xC6\x9F"
+ "\xA0\x73\xB3\x70\xC3\x68\x64\x70"
+ "\xAD\x33\x02\xFB\x88\x74\xAA\x78"
+ "\xC7\x47\x1A\x18\x61\x2D\xAC\x9F"
+ "\x7E\x6F\xDF\x05\x13\x76\xA6\x72"
+ "\xB7\x13\x09\x0F\x7D\x38\xDF\x25"
+ "\x4E\xFD\x50\x45\xFA\x35\x6A\xC0"
+ "\x57\x95\xE1\x21\x26\x10\x9A\x21"
+ "\xA1\x8A\x51\x05\xD1\xB1\x78\x35"
+ "\x98\xF5\xAE\xC0\xC1\x8B\x94\xFF"
+ "\xD0\x69\x3F\x42\xC2\x01\xA7\x9B"
+ "\x23\x16\x47\x72\x81\x13\x3A\x72"
+ "\xEC\xD9\x40\x88\x00\x9C\xB0\xA8"
+ "\x9C\xAC\xCE\x11\x73\x7B\x63\x3E"
+ "\xA3\x63\x98\x7D\x35\xE4\xD9\x83"
+ "\xE2\xD0\x52\x87\x0C\x1F\xB0\xB3"
+ "\x41\x1A\x93\x8D\x76\x31\x9F\xF2"
+ "\xFE\x09\xA3\x8F\x22\x6A\x3B\xB9"
+ "\x6C\x9E\xE4\xA1\xA0\xC4\xE7\xA1"
+ "\x21\x9C\x1A\xCA\x65\xDE\x44\x03"
+ "\x99\xF2\xD2\x39\xE3\x3F\x0F\x37"
+ "\x53\x50\x23\xA4\x81\x6E\xDA\xFB"
+ "\xF8\x7B\x01\xD7\xB2\x32\x9C\xB8"
+ "\xB1\x0E\x99\x17\xB5\x38\xF9\xD7"
+ "\x86\x2D\x6E\x94\x5C\x99\x9D\xB3"
+ "\xD3\x63\x4B\x2A\x7D\x44\x6A\xB2"
+ "\xC1\x03\xE6\x5A\x37\xD8\x64\x18"
+ "\xAA\x32\xCE\x29\xED\xC0\xA2\xCB"
+ "\x8D\xAF\xCD\xBE\x8F\xB6\xEC\xB4"
+ "\x89\x05\x81\x6E\x71\x4F\xC3\x28"
+ "\x10\xC1\x62\xC4\x41\xE9\xD2\x39"
+ "\xF3\x22\x39\x12\x2C\xC2\x95\x2D"
+ "\xBF\x93\x58\x4B\x04\xD1\x8D\x57"
+ "\xAE\xEB\x60\x03\x56\x35\xAD\x5A"
+ "\xE9\xC3\xFF\x4E\x31\xE1\x37\xF8"
+ "\x7D\xEE\x65\x8A\xB6\x88\x1A\x3E"
+ "\x07\x09\x82\xBA\xF0\x80\x8A\xD0"
+ "\xA0\x3F\x6A\xE9\x24\x87\x19\x65"
+ "\x73\x3F\x12\x91\x47\x54\xBA\x39"
+ "\x30\x5B\x1E\xE5\xC2\xF9\x3F\xEF"
+ "\xD6\x75\xF9\xB8\x7C\x8B\x05\x76"
+ "\xEE\xB7\x08\x25\x4B\xB6\x7B\x47"
+ "\x72\xC0\x4C\xD4\xDA\xE0\x75\xF1"
+ "\x7C\xE8\x94\x9E\x16\x6E\xB8\x12"
+ "\xA1\xC1\x6E\x3B\x1C\x59\x41\x2D"
+ "\x23\xFA\x7D\x77\xB8\x46\x75\xFE"
+ "\x4F\x10\xD3\x09\x60\xA1\x36\x96"
+ "\x5B\xC2\xDC\x6E\x84\x7D\x9B\x14"
+ "\x80\x21\x83\x58\x3C\x76\xFD\x28"
+ "\x1D\xF9\x93\x13\xD7\x0E\x62\x14"
+ "\x5A\xC5\x4E\x08\xA5\x56\xA4\x3C"
+ "\x68\x93\x44\x70\xDF\xCF\x4A\x51"
+ "\x0B\x81\x29\x41\xE5\x62\x4D\x36"
+ "\xB3\xEA\x94\xA6\xB9\xDD\x3F\x09"
+ "\x62\x34\xA0\x6A\x7E\x7D\xF5\xF6"
+ "\x01\x91\xB4\x27\xDA\x59\xD6\x17"
+ "\x56\x4D\x82\x62\x37\xA3\x48\x01"
+ "\x99\x91\x77\xB2\x08\x6B\x2C\x37"
+ "\xC5\x5C\xAD\xB6\x07\xB6\x84\xF3"
+ "\x4D\x59\x7D\xC5\x28\x69\xFA\x92"
+ "\x22\x46\x89\x2D\x0F\x2B\x08\x24",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec cast6_cbc_dec_tv_template[] = {
+ { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\xDF\x77\x68\x96\xC7\xBA\xF8\xE2"
+ "\x0E\x24\x99\x1A\xAA\xF3\xC6\x9F"
+ "\xA0\x73\xB3\x70\xC3\x68\x64\x70"
+ "\xAD\x33\x02\xFB\x88\x74\xAA\x78"
+ "\xC7\x47\x1A\x18\x61\x2D\xAC\x9F"
+ "\x7E\x6F\xDF\x05\x13\x76\xA6\x72"
+ "\xB7\x13\x09\x0F\x7D\x38\xDF\x25"
+ "\x4E\xFD\x50\x45\xFA\x35\x6A\xC0"
+ "\x57\x95\xE1\x21\x26\x10\x9A\x21"
+ "\xA1\x8A\x51\x05\xD1\xB1\x78\x35"
+ "\x98\xF5\xAE\xC0\xC1\x8B\x94\xFF"
+ "\xD0\x69\x3F\x42\xC2\x01\xA7\x9B"
+ "\x23\x16\x47\x72\x81\x13\x3A\x72"
+ "\xEC\xD9\x40\x88\x00\x9C\xB0\xA8"
+ "\x9C\xAC\xCE\x11\x73\x7B\x63\x3E"
+ "\xA3\x63\x98\x7D\x35\xE4\xD9\x83"
+ "\xE2\xD0\x52\x87\x0C\x1F\xB0\xB3"
+ "\x41\x1A\x93\x8D\x76\x31\x9F\xF2"
+ "\xFE\x09\xA3\x8F\x22\x6A\x3B\xB9"
+ "\x6C\x9E\xE4\xA1\xA0\xC4\xE7\xA1"
+ "\x21\x9C\x1A\xCA\x65\xDE\x44\x03"
+ "\x99\xF2\xD2\x39\xE3\x3F\x0F\x37"
+ "\x53\x50\x23\xA4\x81\x6E\xDA\xFB"
+ "\xF8\x7B\x01\xD7\xB2\x32\x9C\xB8"
+ "\xB1\x0E\x99\x17\xB5\x38\xF9\xD7"
+ "\x86\x2D\x6E\x94\x5C\x99\x9D\xB3"
+ "\xD3\x63\x4B\x2A\x7D\x44\x6A\xB2"
+ "\xC1\x03\xE6\x5A\x37\xD8\x64\x18"
+ "\xAA\x32\xCE\x29\xED\xC0\xA2\xCB"
+ "\x8D\xAF\xCD\xBE\x8F\xB6\xEC\xB4"
+ "\x89\x05\x81\x6E\x71\x4F\xC3\x28"
+ "\x10\xC1\x62\xC4\x41\xE9\xD2\x39"
+ "\xF3\x22\x39\x12\x2C\xC2\x95\x2D"
+ "\xBF\x93\x58\x4B\x04\xD1\x8D\x57"
+ "\xAE\xEB\x60\x03\x56\x35\xAD\x5A"
+ "\xE9\xC3\xFF\x4E\x31\xE1\x37\xF8"
+ "\x7D\xEE\x65\x8A\xB6\x88\x1A\x3E"
+ "\x07\x09\x82\xBA\xF0\x80\x8A\xD0"
+ "\xA0\x3F\x6A\xE9\x24\x87\x19\x65"
+ "\x73\x3F\x12\x91\x47\x54\xBA\x39"
+ "\x30\x5B\x1E\xE5\xC2\xF9\x3F\xEF"
+ "\xD6\x75\xF9\xB8\x7C\x8B\x05\x76"
+ "\xEE\xB7\x08\x25\x4B\xB6\x7B\x47"
+ "\x72\xC0\x4C\xD4\xDA\xE0\x75\xF1"
+ "\x7C\xE8\x94\x9E\x16\x6E\xB8\x12"
+ "\xA1\xC1\x6E\x3B\x1C\x59\x41\x2D"
+ "\x23\xFA\x7D\x77\xB8\x46\x75\xFE"
+ "\x4F\x10\xD3\x09\x60\xA1\x36\x96"
+ "\x5B\xC2\xDC\x6E\x84\x7D\x9B\x14"
+ "\x80\x21\x83\x58\x3C\x76\xFD\x28"
+ "\x1D\xF9\x93\x13\xD7\x0E\x62\x14"
+ "\x5A\xC5\x4E\x08\xA5\x56\xA4\x3C"
+ "\x68\x93\x44\x70\xDF\xCF\x4A\x51"
+ "\x0B\x81\x29\x41\xE5\x62\x4D\x36"
+ "\xB3\xEA\x94\xA6\xB9\xDD\x3F\x09"
+ "\x62\x34\xA0\x6A\x7E\x7D\xF5\xF6"
+ "\x01\x91\xB4\x27\xDA\x59\xD6\x17"
+ "\x56\x4D\x82\x62\x37\xA3\x48\x01"
+ "\x99\x91\x77\xB2\x08\x6B\x2C\x37"
+ "\xC5\x5C\xAD\xB6\x07\xB6\x84\xF3"
+ "\x4D\x59\x7D\xC5\x28\x69\xFA\x92"
+ "\x22\x46\x89\x2D\x0F\x2B\x08\x24",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec cast6_ctr_enc_tv_template[] = {
+ { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A",
+ .ilen = 17,
+ .result = "\x26\x0A\xF1\xE2\x3F\x8A\xEF\xA3"
+ "\x53\x9A\x5E\x1B\x2A\x1A\xC6\x0A"
+ "\x57",
+ .rlen = 17,
+ }, { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\x26\x0A\xF1\xE2\x3F\x8A\xEF\xA3"
+ "\x53\x9A\x5E\x1B\x2A\x1A\xC6\x0A"
+ "\x57\xA3\xEF\x47\x2A\xE8\x88\xA7"
+ "\x3C\xD0\xEC\xB9\x94\x50\x7D\x56"
+ "\xBC\xE1\xC1\xF5\xE1\xEE\x12\xF8"
+ "\x4F\x03\x82\x3A\x93\x6B\x4C\xD3"
+ "\xE3\xF3\xFA\xC2\x23\x55\x98\x20"
+ "\x49\x76\x9B\x6B\xC1\x23\xBF\xE5"
+ "\xD4\xC4\x2F\x61\xE1\x67\x2A\x30"
+ "\x6F\x29\xCA\x54\xF8\x1B\xA6\x7D"
+ "\x66\x45\xEE\xC8\x19\xBE\x50\xF0"
+ "\x5F\x65\xF8\x1E\x4D\x07\x87\xD9"
+ "\xD3\xD9\x1B\x09\x89\xFD\x42\xC5"
+ "\xDB\xEB\x86\xF1\x67\x04\x0F\x5C"
+ "\x81\xDF\x82\x12\xC7\x4C\x1B\x07"
+ "\xDE\xE6\xFA\x29\x86\xD1\xB0\xBA"
+ "\x3D\x6A\x69\x76\xEC\x0F\xB4\xE6"
+ "\xCD\xA7\xF8\xA8\xB8\xE0\x33\xF5"
+ "\x49\x61\x22\x52\x64\x8C\x46\x41"
+ "\x1F\x48\x5F\x4F\xA2\x89\x36\x17"
+ "\x20\xF8\x2F\x8F\x4B\xFA\xF2\xC0"
+ "\x1E\x18\xA2\xF8\xB7\x6D\x98\xE3"
+ "\x00\x14\x15\x59\xC1\x30\x64\xAF"
+ "\xA8\x01\x38\xAB\xD4\x8B\xEC\x7C"
+ "\x44\x9A\xC6\x2C\x2E\x2B\x2B\xF4"
+ "\x02\x37\xC4\x69\xEF\x36\xC1\xF3"
+ "\xA0\xFB\xFE\x29\xAD\x39\xCF\xD0"
+ "\x51\x73\xA3\x22\x42\x41\xAB\xD2"
+ "\x0F\x50\x14\xB9\x54\xD3\xD4\xFA"
+ "\xBF\xC9\xBB\xCE\xC4\x1D\x2D\xAF"
+ "\xC9\x3F\x07\x87\x42\x4B\x3A\x54"
+ "\x34\x8E\x37\xA3\x03\x6F\x65\x66"
+ "\xDB\x44\xC3\xE8\xD7\xDD\x7D\xDD"
+ "\x61\xB4\x2B\x80\xA3\x98\x13\xF5"
+ "\x5A\xD3\x34\x58\xC3\x6E\xF6\xB8"
+ "\x0A\xC6\x50\x01\x8E\xD5\x6C\x7D"
+ "\xFE\x16\xB6\xCF\xFC\x51\x40\xAE"
+ "\xB3\x15\xAC\x90\x6F\x0B\x28\x3A"
+ "\x60\x40\x38\x90\x20\x46\xC7\xB3"
+ "\x0B\x12\x6D\x3B\x15\x14\xF9\xF4"
+ "\x11\x41\x76\x6B\xB3\x60\x82\x3C"
+ "\x84\xFB\x08\x2E\x92\x25\xCB\x79"
+ "\x6F\x58\xC5\x94\x00\x00\x47\xB6"
+ "\x9E\xDC\x0F\x29\x70\x46\x20\x76"
+ "\x65\x75\x66\x5C\x00\x96\xB3\xE1"
+ "\x0B\xA7\x11\x8B\x2E\x61\x4E\x45"
+ "\x73\xFC\x91\xAB\x79\x41\x23\x14"
+ "\x13\xB6\x72\x6C\x46\xB3\x03\x11"
+ "\xE4\xF1\xEE\xC9\x7A\xCF\x96\x32"
+ "\xB6\xF0\x8B\x97\xB4\xCF\x82\xB7"
+ "\x15\x48\x44\x99\x09\xF6\xE0\xD7"
+ "\xBC\xF1\x5B\x91\x4F\x30\x22\xA2"
+ "\x45\xC4\x68\x55\xC2\xBE\xA7\xD2"
+ "\x12\x53\x35\x9C\xF9\xE7\x35\x5D"
+ "\x81\xE4\x86\x42\xC3\x58\xFB\xF0"
+ "\x38\x9B\x8E\x5A\xEF\x83\x33\x0F"
+ "\x00\x4E\x3F\x9F\xF5\x84\x62\xC4"
+ "\x19\x35\x88\x22\x45\x59\x0E\x8F"
+ "\xEC\x27\xDD\x4A\xA4\x1F\xBC\x41"
+ "\x9B\x66\x8D\x32\xBA\x81\x34\x87"
+ "\x0E\x74\x33\x30\x62\xB9\x89\xDF"
+ "\xF9\xC5\xDD\x27\xB3\x39\xCB\xCB",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec cast6_ctr_dec_tv_template[] = {
+ { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x26\x0A\xF1\xE2\x3F\x8A\xEF\xA3"
+ "\x53\x9A\x5E\x1B\x2A\x1A\xC6\x0A"
+ "\x57",
+ .ilen = 17,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A",
+ .rlen = 17,
+ }, { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x26\x0A\xF1\xE2\x3F\x8A\xEF\xA3"
+ "\x53\x9A\x5E\x1B\x2A\x1A\xC6\x0A"
+ "\x57\xA3\xEF\x47\x2A\xE8\x88\xA7"
+ "\x3C\xD0\xEC\xB9\x94\x50\x7D\x56"
+ "\xBC\xE1\xC1\xF5\xE1\xEE\x12\xF8"
+ "\x4F\x03\x82\x3A\x93\x6B\x4C\xD3"
+ "\xE3\xF3\xFA\xC2\x23\x55\x98\x20"
+ "\x49\x76\x9B\x6B\xC1\x23\xBF\xE5"
+ "\xD4\xC4\x2F\x61\xE1\x67\x2A\x30"
+ "\x6F\x29\xCA\x54\xF8\x1B\xA6\x7D"
+ "\x66\x45\xEE\xC8\x19\xBE\x50\xF0"
+ "\x5F\x65\xF8\x1E\x4D\x07\x87\xD9"
+ "\xD3\xD9\x1B\x09\x89\xFD\x42\xC5"
+ "\xDB\xEB\x86\xF1\x67\x04\x0F\x5C"
+ "\x81\xDF\x82\x12\xC7\x4C\x1B\x07"
+ "\xDE\xE6\xFA\x29\x86\xD1\xB0\xBA"
+ "\x3D\x6A\x69\x76\xEC\x0F\xB4\xE6"
+ "\xCD\xA7\xF8\xA8\xB8\xE0\x33\xF5"
+ "\x49\x61\x22\x52\x64\x8C\x46\x41"
+ "\x1F\x48\x5F\x4F\xA2\x89\x36\x17"
+ "\x20\xF8\x2F\x8F\x4B\xFA\xF2\xC0"
+ "\x1E\x18\xA2\xF8\xB7\x6D\x98\xE3"
+ "\x00\x14\x15\x59\xC1\x30\x64\xAF"
+ "\xA8\x01\x38\xAB\xD4\x8B\xEC\x7C"
+ "\x44\x9A\xC6\x2C\x2E\x2B\x2B\xF4"
+ "\x02\x37\xC4\x69\xEF\x36\xC1\xF3"
+ "\xA0\xFB\xFE\x29\xAD\x39\xCF\xD0"
+ "\x51\x73\xA3\x22\x42\x41\xAB\xD2"
+ "\x0F\x50\x14\xB9\x54\xD3\xD4\xFA"
+ "\xBF\xC9\xBB\xCE\xC4\x1D\x2D\xAF"
+ "\xC9\x3F\x07\x87\x42\x4B\x3A\x54"
+ "\x34\x8E\x37\xA3\x03\x6F\x65\x66"
+ "\xDB\x44\xC3\xE8\xD7\xDD\x7D\xDD"
+ "\x61\xB4\x2B\x80\xA3\x98\x13\xF5"
+ "\x5A\xD3\x34\x58\xC3\x6E\xF6\xB8"
+ "\x0A\xC6\x50\x01\x8E\xD5\x6C\x7D"
+ "\xFE\x16\xB6\xCF\xFC\x51\x40\xAE"
+ "\xB3\x15\xAC\x90\x6F\x0B\x28\x3A"
+ "\x60\x40\x38\x90\x20\x46\xC7\xB3"
+ "\x0B\x12\x6D\x3B\x15\x14\xF9\xF4"
+ "\x11\x41\x76\x6B\xB3\x60\x82\x3C"
+ "\x84\xFB\x08\x2E\x92\x25\xCB\x79"
+ "\x6F\x58\xC5\x94\x00\x00\x47\xB6"
+ "\x9E\xDC\x0F\x29\x70\x46\x20\x76"
+ "\x65\x75\x66\x5C\x00\x96\xB3\xE1"
+ "\x0B\xA7\x11\x8B\x2E\x61\x4E\x45"
+ "\x73\xFC\x91\xAB\x79\x41\x23\x14"
+ "\x13\xB6\x72\x6C\x46\xB3\x03\x11"
+ "\xE4\xF1\xEE\xC9\x7A\xCF\x96\x32"
+ "\xB6\xF0\x8B\x97\xB4\xCF\x82\xB7"
+ "\x15\x48\x44\x99\x09\xF6\xE0\xD7"
+ "\xBC\xF1\x5B\x91\x4F\x30\x22\xA2"
+ "\x45\xC4\x68\x55\xC2\xBE\xA7\xD2"
+ "\x12\x53\x35\x9C\xF9\xE7\x35\x5D"
+ "\x81\xE4\x86\x42\xC3\x58\xFB\xF0"
+ "\x38\x9B\x8E\x5A\xEF\x83\x33\x0F"
+ "\x00\x4E\x3F\x9F\xF5\x84\x62\xC4"
+ "\x19\x35\x88\x22\x45\x59\x0E\x8F"
+ "\xEC\x27\xDD\x4A\xA4\x1F\xBC\x41"
+ "\x9B\x66\x8D\x32\xBA\x81\x34\x87"
+ "\x0E\x74\x33\x30\x62\xB9\x89\xDF"
+ "\xF9\xC5\xDD\x27\xB3\x39\xCB\xCB",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec cast6_lrw_enc_tv_template[] = {
+ { /* Generated from TF test vectors */
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x05\x11\xb7\x18\xab\xc6\x2d\xac"
+ "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c"
+ "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8"
+ "\x50\x38\x1f\x71\x49\xb6\x57\xd6"
+ "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90"
+ "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6"
+ "\xad\x1e\x9e\x20\x5f\x38\xbe\x04"
+ "\xda\x10\x8e\xed\xa2\xa4\x87\xab"
+ "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c"
+ "\xc9\xac\x42\x31\x95\x7c\xc9\x04"
+ "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6"
+ "\x15\xd7\x3f\x4f\x2f\x66\x69\x03"
+ "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65"
+ "\x4c\x96\x12\xed\x7c\x92\x03\x01"
+ "\x6f\xbc\x35\x93\xac\xf1\x27\xf1"
+ "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50"
+ "\x89\xa4\x8e\x66\x44\x85\xcc\xfd"
+ "\x33\x14\x70\xe3\x96\xb2\xc3\xd3"
+ "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5"
+ "\x2d\x64\x75\xdd\xb4\x54\xe6\x74"
+ "\x8c\xd3\x9d\x9e\x86\xab\x51\x53"
+ "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40"
+ "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5"
+ "\x76\x12\x73\x44\x1a\x56\xd7\x72"
+ "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda"
+ "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd"
+ "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60"
+ "\x1a\xe2\x70\x85\x58\xc2\x1b\x09"
+ "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9"
+ "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8"
+ "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8"
+ "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10"
+ "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1"
+ "\x90\x3e\x76\x4a\x74\xa4\x21\x2c"
+ "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e"
+ "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f"
+ "\x8d\x23\x31\x74\x84\xeb\x88\x6e"
+ "\xcc\xb9\xbc\x22\x83\x19\x07\x22"
+ "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78"
+ "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5"
+ "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41"
+ "\x3c\xce\x8f\x42\x60\x71\xa7\x75"
+ "\x08\x40\x65\x8a\x82\xbf\xf5\x43"
+ "\x71\x96\xa9\x4d\x44\x8a\x20\xbe"
+ "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65"
+ "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9"
+ "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4"
+ "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a"
+ "\x62\x73\x65\xfd\x46\x63\x25\x3d"
+ "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf"
+ "\x24\xf3\xb4\xac\x64\xba\xdf\x4b"
+ "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7"
+ "\xc5\x68\x77\x84\x32\x2b\xcc\x85"
+ "\x74\x96\xf0\x12\x77\x61\xb9\xeb"
+ "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8"
+ "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24"
+ "\xda\x39\x87\x45\xc0\x2b\xbb\x01"
+ "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce"
+ "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6"
+ "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32"
+ "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45"
+ "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6"
+ "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4"
+ "\x21\xc4\xc2\x75\x67\x89\x37\x0a",
+ .ilen = 512,
+ .result = "\x55\x25\x09\x8B\xB5\xD5\xF8\xBF"
+ "\x37\x4A\xFE\x3C\x47\xD8\xE6\xEB"
+ "\xCA\xA4\x9B\xB0\xAB\x6D\x64\xCA"
+ "\x58\xB6\x73\xF0\xD7\x52\x34\xEF"
+ "\xFB\x3E\x96\x81\xB7\x71\x34\xA4"
+ "\x55\x20\xBE\x39\x5A\x2B\xF9\xD1"
+ "\x65\x0B\xDA\xD3\x7E\xB3\xA6\xF7"
+ "\x2E\x0B\x5A\x52\xDB\x39\x8C\x9B"
+ "\x61\x17\x5F\xAF\xB6\x5A\xC8\x08"
+ "\xA7\xB7\x2A\x11\x7C\x97\x38\x9D"
+ "\x59\x0E\x66\x59\x5E\xD8\x8B\xCE"
+ "\x70\xE0\xC3\x42\xB0\x8C\x0F\xBA"
+ "\xB2\x0D\x81\xB6\xBE\x61\x1C\x2D"
+ "\x7E\xEA\x91\x25\xAC\xEC\xF8\x28"
+ "\x80\x1D\xF0\x30\xBA\x62\x77\x7D"
+ "\xDB\x15\x69\xDF\xFA\x2A\x81\x64"
+ "\x95\x5B\xA4\x7F\x3E\x4F\xE3\x30"
+ "\xB0\x5C\xC2\x05\xF8\xF0\x29\xE7"
+ "\x0A\xA0\x66\xB2\x5D\x0F\x39\x2B"
+ "\xB4\xB3\x00\xA9\xD0\xAB\x63\x61"
+ "\x5E\xDB\xFC\x11\x74\x25\x96\x65"
+ "\xE8\xE2\x34\x57\x77\x15\x5E\x70"
+ "\xFF\x10\x90\xC3\x64\xF0\x11\x0A"
+ "\x63\x3A\xD3\x55\x92\x15\x4B\x0C"
+ "\xC7\x08\x89\x17\x3B\x99\xAD\x63"
+ "\xE7\x06\xDF\x52\xBC\x15\x64\x45"
+ "\x9D\x7A\xFB\x69\xBC\x2D\x6E\xA9"
+ "\x35\xD9\xD8\xF5\x0C\xC4\xA2\x23"
+ "\x9C\x18\x8B\xA8\x8C\xFE\xF8\x0E"
+ "\xBD\xAB\x60\x1A\x51\x17\x54\x27"
+ "\xB6\xE8\xBE\x0F\xA9\xA5\x82\x19"
+ "\x2F\x6F\x20\xA7\x47\xED\x74\x6C"
+ "\x4E\xC1\xF8\x8C\x14\xF3\xBB\x1F"
+ "\xED\x4D\x8F\x7C\x37\xEF\x19\xA1"
+ "\x07\x16\xDE\x76\xCC\x5E\x94\x02"
+ "\xFB\xBF\xE4\x81\x50\xCE\xFC\x0F"
+ "\x9E\xCF\x3D\xF6\x67\x00\xBF\xA7"
+ "\x6E\x21\x58\x36\x06\xDE\xB3\xD4"
+ "\xA2\xFA\xD8\x4E\xE0\xB9\x7F\x23"
+ "\x51\x21\x2B\x32\x68\xAA\xF8\xA8"
+ "\x93\x08\xB5\x6D\xE6\x43\x2C\xB7"
+ "\x31\xB2\x0F\xD0\xA2\x51\xC0\x25"
+ "\x30\xC7\x10\x3F\x97\x27\x01\x8E"
+ "\xFA\xD8\x4F\x78\xD8\x2E\x1D\xEB"
+ "\xA1\x37\x52\x0F\x7B\x5E\x87\xA8"
+ "\x22\xE2\xE6\x92\xA7\x5F\x11\x32"
+ "\xCC\x93\x34\xFC\xD1\x7E\xAE\x54"
+ "\xBC\x6A\x1B\x91\xD1\x2E\x21\xEC"
+ "\x5D\xF1\xC4\xF1\x55\x20\xBF\xE5"
+ "\x96\x3D\x69\x91\x20\x4E\xF2\x61"
+ "\xDA\x77\xFE\xEE\xC3\x74\x57\x2A"
+ "\x78\x39\xB0\xE0\xCF\x12\x56\xD6"
+ "\x05\xDC\xF9\x19\x66\x44\x1D\xF9"
+ "\x82\x37\xD4\xC2\x60\xB6\x31\xDF"
+ "\x0C\xAF\xBC\x8B\x55\x9A\xC8\x2D"
+ "\xAB\xA7\x88\x7B\x41\xE8\x29\xC9"
+ "\x9B\x8D\xA7\x00\x86\x25\xB6\x14"
+ "\xF5\x13\x73\xD7\x4B\x6B\x83\xF3"
+ "\xAF\x96\x00\xE4\xB7\x3C\x65\xA6"
+ "\x15\xB7\x94\x7D\x4E\x70\x4C\x75"
+ "\xF3\xB4\x02\xA9\x17\x1C\x7A\x0A"
+ "\xC0\xD5\x33\x11\x56\xDE\xDC\xF5"
+ "\x8D\xD9\xCD\x3B\x22\x67\x18\xC7"
+ "\xC4\xF5\x99\x61\xBC\xBB\x5B\x46",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec cast6_lrw_dec_tv_template[] = {
+ { /* Generated from TF test vectors */
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x55\x25\x09\x8B\xB5\xD5\xF8\xBF"
+ "\x37\x4A\xFE\x3C\x47\xD8\xE6\xEB"
+ "\xCA\xA4\x9B\xB0\xAB\x6D\x64\xCA"
+ "\x58\xB6\x73\xF0\xD7\x52\x34\xEF"
+ "\xFB\x3E\x96\x81\xB7\x71\x34\xA4"
+ "\x55\x20\xBE\x39\x5A\x2B\xF9\xD1"
+ "\x65\x0B\xDA\xD3\x7E\xB3\xA6\xF7"
+ "\x2E\x0B\x5A\x52\xDB\x39\x8C\x9B"
+ "\x61\x17\x5F\xAF\xB6\x5A\xC8\x08"
+ "\xA7\xB7\x2A\x11\x7C\x97\x38\x9D"
+ "\x59\x0E\x66\x59\x5E\xD8\x8B\xCE"
+ "\x70\xE0\xC3\x42\xB0\x8C\x0F\xBA"
+ "\xB2\x0D\x81\xB6\xBE\x61\x1C\x2D"
+ "\x7E\xEA\x91\x25\xAC\xEC\xF8\x28"
+ "\x80\x1D\xF0\x30\xBA\x62\x77\x7D"
+ "\xDB\x15\x69\xDF\xFA\x2A\x81\x64"
+ "\x95\x5B\xA4\x7F\x3E\x4F\xE3\x30"
+ "\xB0\x5C\xC2\x05\xF8\xF0\x29\xE7"
+ "\x0A\xA0\x66\xB2\x5D\x0F\x39\x2B"
+ "\xB4\xB3\x00\xA9\xD0\xAB\x63\x61"
+ "\x5E\xDB\xFC\x11\x74\x25\x96\x65"
+ "\xE8\xE2\x34\x57\x77\x15\x5E\x70"
+ "\xFF\x10\x90\xC3\x64\xF0\x11\x0A"
+ "\x63\x3A\xD3\x55\x92\x15\x4B\x0C"
+ "\xC7\x08\x89\x17\x3B\x99\xAD\x63"
+ "\xE7\x06\xDF\x52\xBC\x15\x64\x45"
+ "\x9D\x7A\xFB\x69\xBC\x2D\x6E\xA9"
+ "\x35\xD9\xD8\xF5\x0C\xC4\xA2\x23"
+ "\x9C\x18\x8B\xA8\x8C\xFE\xF8\x0E"
+ "\xBD\xAB\x60\x1A\x51\x17\x54\x27"
+ "\xB6\xE8\xBE\x0F\xA9\xA5\x82\x19"
+ "\x2F\x6F\x20\xA7\x47\xED\x74\x6C"
+ "\x4E\xC1\xF8\x8C\x14\xF3\xBB\x1F"
+ "\xED\x4D\x8F\x7C\x37\xEF\x19\xA1"
+ "\x07\x16\xDE\x76\xCC\x5E\x94\x02"
+ "\xFB\xBF\xE4\x81\x50\xCE\xFC\x0F"
+ "\x9E\xCF\x3D\xF6\x67\x00\xBF\xA7"
+ "\x6E\x21\x58\x36\x06\xDE\xB3\xD4"
+ "\xA2\xFA\xD8\x4E\xE0\xB9\x7F\x23"
+ "\x51\x21\x2B\x32\x68\xAA\xF8\xA8"
+ "\x93\x08\xB5\x6D\xE6\x43\x2C\xB7"
+ "\x31\xB2\x0F\xD0\xA2\x51\xC0\x25"
+ "\x30\xC7\x10\x3F\x97\x27\x01\x8E"
+ "\xFA\xD8\x4F\x78\xD8\x2E\x1D\xEB"
+ "\xA1\x37\x52\x0F\x7B\x5E\x87\xA8"
+ "\x22\xE2\xE6\x92\xA7\x5F\x11\x32"
+ "\xCC\x93\x34\xFC\xD1\x7E\xAE\x54"
+ "\xBC\x6A\x1B\x91\xD1\x2E\x21\xEC"
+ "\x5D\xF1\xC4\xF1\x55\x20\xBF\xE5"
+ "\x96\x3D\x69\x91\x20\x4E\xF2\x61"
+ "\xDA\x77\xFE\xEE\xC3\x74\x57\x2A"
+ "\x78\x39\xB0\xE0\xCF\x12\x56\xD6"
+ "\x05\xDC\xF9\x19\x66\x44\x1D\xF9"
+ "\x82\x37\xD4\xC2\x60\xB6\x31\xDF"
+ "\x0C\xAF\xBC\x8B\x55\x9A\xC8\x2D"
+ "\xAB\xA7\x88\x7B\x41\xE8\x29\xC9"
+ "\x9B\x8D\xA7\x00\x86\x25\xB6\x14"
+ "\xF5\x13\x73\xD7\x4B\x6B\x83\xF3"
+ "\xAF\x96\x00\xE4\xB7\x3C\x65\xA6"
+ "\x15\xB7\x94\x7D\x4E\x70\x4C\x75"
+ "\xF3\xB4\x02\xA9\x17\x1C\x7A\x0A"
+ "\xC0\xD5\x33\x11\x56\xDE\xDC\xF5"
+ "\x8D\xD9\xCD\x3B\x22\x67\x18\xC7"
+ "\xC4\xF5\x99\x61\xBC\xBB\x5B\x46",
+ .ilen = 512,
+ .result = "\x05\x11\xb7\x18\xab\xc6\x2d\xac"
+ "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c"
+ "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8"
+ "\x50\x38\x1f\x71\x49\xb6\x57\xd6"
+ "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90"
+ "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6"
+ "\xad\x1e\x9e\x20\x5f\x38\xbe\x04"
+ "\xda\x10\x8e\xed\xa2\xa4\x87\xab"
+ "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c"
+ "\xc9\xac\x42\x31\x95\x7c\xc9\x04"
+ "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6"
+ "\x15\xd7\x3f\x4f\x2f\x66\x69\x03"
+ "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65"
+ "\x4c\x96\x12\xed\x7c\x92\x03\x01"
+ "\x6f\xbc\x35\x93\xac\xf1\x27\xf1"
+ "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50"
+ "\x89\xa4\x8e\x66\x44\x85\xcc\xfd"
+ "\x33\x14\x70\xe3\x96\xb2\xc3\xd3"
+ "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5"
+ "\x2d\x64\x75\xdd\xb4\x54\xe6\x74"
+ "\x8c\xd3\x9d\x9e\x86\xab\x51\x53"
+ "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40"
+ "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5"
+ "\x76\x12\x73\x44\x1a\x56\xd7\x72"
+ "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda"
+ "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd"
+ "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60"
+ "\x1a\xe2\x70\x85\x58\xc2\x1b\x09"
+ "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9"
+ "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8"
+ "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8"
+ "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10"
+ "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1"
+ "\x90\x3e\x76\x4a\x74\xa4\x21\x2c"
+ "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e"
+ "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f"
+ "\x8d\x23\x31\x74\x84\xeb\x88\x6e"
+ "\xcc\xb9\xbc\x22\x83\x19\x07\x22"
+ "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78"
+ "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5"
+ "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41"
+ "\x3c\xce\x8f\x42\x60\x71\xa7\x75"
+ "\x08\x40\x65\x8a\x82\xbf\xf5\x43"
+ "\x71\x96\xa9\x4d\x44\x8a\x20\xbe"
+ "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65"
+ "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9"
+ "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4"
+ "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a"
+ "\x62\x73\x65\xfd\x46\x63\x25\x3d"
+ "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf"
+ "\x24\xf3\xb4\xac\x64\xba\xdf\x4b"
+ "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7"
+ "\xc5\x68\x77\x84\x32\x2b\xcc\x85"
+ "\x74\x96\xf0\x12\x77\x61\xb9\xeb"
+ "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8"
+ "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24"
+ "\xda\x39\x87\x45\xc0\x2b\xbb\x01"
+ "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce"
+ "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6"
+ "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32"
+ "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45"
+ "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6"
+ "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4"
+ "\x21\xc4\xc2\x75\x67\x89\x37\x0a",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec cast6_xts_enc_tv_template[] = {
+ { /* Generated from TF test vectors */
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x62\x49\x77\x57\x24\x70\x93\x69"
+ "\x99\x59\x57\x49\x66\x96\x76\x27"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95"
+ "\x02\x88\x41\x97\x16\x93\x99\x37"
+ "\x51\x05\x82\x09\x74\x94\x45\x92",
+ .klen = 64,
+ .iv = "\xff\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .ilen = 512,
+ .result = "\xDE\x6F\x22\xA5\xE8\x39\xE8\x78"
+ "\x88\x5A\x4F\x8D\x82\x76\x52\x6D"
+ "\xB2\x41\x16\xF4\x2B\xA6\xEB\xF6"
+ "\xE2\xC5\x62\x8D\x61\xA1\x01\xED"
+ "\xD9\x38\x01\xC1\x43\x63\x4E\x88"
+ "\xC9\x4B\x5A\x88\x80\xB7\x5C\x71"
+ "\x47\xEE\x11\xD8\xB7\x2D\x5D\x13"
+ "\x1A\xB1\x68\x5B\x61\xA7\xA9\x81"
+ "\x8B\x83\xA1\x6A\xAA\x36\xD6\xB6"
+ "\x60\x54\x09\x32\xFE\x6A\x76\x2E"
+ "\x28\xFF\xD5\xD6\xDD\x1D\x45\x7D"
+ "\xF0\x8B\xF3\x32\x4E\x6C\x12\xCB"
+ "\xB8\x25\x70\xF8\x40\xBC\x90\x1B"
+ "\x11\xC3\x59\xAF\xF0\x2F\x92\xDD"
+ "\xD3\x3B\xCF\x60\xA1\x78\x94\x57"
+ "\xAF\x76\xC1\x67\xA6\x3C\xCD\x98"
+ "\xB1\xF7\x27\xB9\xA3\xBD\x10\xEA"
+ "\xCD\x8B\xC2\xF2\x14\xF2\xB2\x67"
+ "\x05\xDD\x1D\x58\x6E\x2F\x95\x08"
+ "\x3A\xF8\x78\x76\x82\x56\xA7\xEC"
+ "\x51\x4B\x85\x77\xC2\x4C\x4A\x34"
+ "\x71\x38\x17\x91\x44\xE8\xFC\x65"
+ "\x99\x0D\x52\x91\xEE\xF8\xEF\x27"
+ "\x2A\x9E\x6E\x78\xC4\x26\x87\xF4"
+ "\x8A\xF0\x2D\x04\xE8\x14\x92\x5D"
+ "\x59\x22\x9B\x29\x5C\x18\xF0\xC3"
+ "\x47\xF3\x76\xD8\xE4\xF3\x1B\xD1"
+ "\x70\xA3\x0D\xB5\x70\x02\x1D\xA3"
+ "\x91\x3B\x49\x73\x18\xAB\xD4\xC9"
+ "\xC3\x1E\xEF\x1F\xFE\xD5\x59\x8A"
+ "\xD7\xF6\xC9\x71\x67\x79\xD7\x0E"
+ "\xBE\x1F\x8E\xEC\x55\x7E\x4F\x24"
+ "\xE6\x87\xEA\xFE\x96\x25\x67\x8E"
+ "\x93\x03\xFA\xFF\xCE\xAF\xB2\x3C"
+ "\x6F\xEB\x57\xFB\xD3\x28\x87\xA9"
+ "\xCE\xC2\xF5\x9C\xC6\x67\xB5\x97"
+ "\x49\xF7\x04\xCB\xEF\x84\x98\x33"
+ "\xAF\x38\xD3\x04\x1C\x24\x71\x38"
+ "\xC7\x71\xDD\x43\x0D\x12\x4A\x18"
+ "\xBA\xC4\xAF\xBA\xB2\x5B\xEB\x95"
+ "\x02\x43\x5D\xCE\x19\xCC\xCD\x66"
+ "\x91\x0B\x8C\x7F\x51\xC4\xBF\x3C"
+ "\x8B\xF1\xCC\xAA\x29\xD7\x87\xCB"
+ "\x3E\xC5\xF3\xC9\x75\xE8\xA3\x5B"
+ "\x30\x45\xA9\xB7\xAF\x80\x64\x6F"
+ "\x75\x4A\xA7\xC0\x6D\x19\x6B\xDE"
+ "\x17\xDE\x6D\xEA\x87\x9F\x95\xAE"
+ "\xF5\x3C\xEE\x54\xB8\x27\x84\xF8"
+ "\x97\xA3\xE1\x6F\x38\x24\x34\x88"
+ "\xCE\xBD\x32\x52\xE0\x00\x6C\x94"
+ "\xC9\xD7\x5D\x37\x81\x33\x2E\x7F"
+ "\x4F\x7E\x2E\x0D\x94\xBD\xEA\x59"
+ "\x34\x39\xA8\x35\x12\xB7\xBC\xAC"
+ "\xEA\x52\x9C\x78\x02\x6D\x92\x36"
+ "\xFB\x59\x2B\xA4\xEA\x7B\x1B\x83"
+ "\xE1\x4D\x5E\x2A\x7E\x92\xB1\x64"
+ "\xDE\xE0\x27\x4B\x0A\x6F\x4C\xE3"
+ "\xB0\xEB\x31\xE4\x69\x95\xAB\x35"
+ "\x8B\x2C\xF5\x6B\x7F\xF1\xA2\x82"
+ "\xF8\xD9\x47\x82\xA9\x82\x03\x91"
+ "\x69\x1F\xBE\x4C\xE7\xC7\x34\x2F"
+ "\x45\x72\x80\x17\x81\xBD\x9D\x62"
+ "\xA1\xAC\xE8\xCF\xC6\x74\xCF\xDC"
+ "\x22\x60\x4E\xE8\xA4\x5D\x85\xB9",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec cast6_xts_dec_tv_template[] = {
+ { /* Generated from TF test vectors */
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x62\x49\x77\x57\x24\x70\x93\x69"
+ "\x99\x59\x57\x49\x66\x96\x76\x27"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95"
+ "\x02\x88\x41\x97\x16\x93\x99\x37"
+ "\x51\x05\x82\x09\x74\x94\x45\x92",
+ .klen = 64,
+ .iv = "\xff\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\xDE\x6F\x22\xA5\xE8\x39\xE8\x78"
+ "\x88\x5A\x4F\x8D\x82\x76\x52\x6D"
+ "\xB2\x41\x16\xF4\x2B\xA6\xEB\xF6"
+ "\xE2\xC5\x62\x8D\x61\xA1\x01\xED"
+ "\xD9\x38\x01\xC1\x43\x63\x4E\x88"
+ "\xC9\x4B\x5A\x88\x80\xB7\x5C\x71"
+ "\x47\xEE\x11\xD8\xB7\x2D\x5D\x13"
+ "\x1A\xB1\x68\x5B\x61\xA7\xA9\x81"
+ "\x8B\x83\xA1\x6A\xAA\x36\xD6\xB6"
+ "\x60\x54\x09\x32\xFE\x6A\x76\x2E"
+ "\x28\xFF\xD5\xD6\xDD\x1D\x45\x7D"
+ "\xF0\x8B\xF3\x32\x4E\x6C\x12\xCB"
+ "\xB8\x25\x70\xF8\x40\xBC\x90\x1B"
+ "\x11\xC3\x59\xAF\xF0\x2F\x92\xDD"
+ "\xD3\x3B\xCF\x60\xA1\x78\x94\x57"
+ "\xAF\x76\xC1\x67\xA6\x3C\xCD\x98"
+ "\xB1\xF7\x27\xB9\xA3\xBD\x10\xEA"
+ "\xCD\x8B\xC2\xF2\x14\xF2\xB2\x67"
+ "\x05\xDD\x1D\x58\x6E\x2F\x95\x08"
+ "\x3A\xF8\x78\x76\x82\x56\xA7\xEC"
+ "\x51\x4B\x85\x77\xC2\x4C\x4A\x34"
+ "\x71\x38\x17\x91\x44\xE8\xFC\x65"
+ "\x99\x0D\x52\x91\xEE\xF8\xEF\x27"
+ "\x2A\x9E\x6E\x78\xC4\x26\x87\xF4"
+ "\x8A\xF0\x2D\x04\xE8\x14\x92\x5D"
+ "\x59\x22\x9B\x29\x5C\x18\xF0\xC3"
+ "\x47\xF3\x76\xD8\xE4\xF3\x1B\xD1"
+ "\x70\xA3\x0D\xB5\x70\x02\x1D\xA3"
+ "\x91\x3B\x49\x73\x18\xAB\xD4\xC9"
+ "\xC3\x1E\xEF\x1F\xFE\xD5\x59\x8A"
+ "\xD7\xF6\xC9\x71\x67\x79\xD7\x0E"
+ "\xBE\x1F\x8E\xEC\x55\x7E\x4F\x24"
+ "\xE6\x87\xEA\xFE\x96\x25\x67\x8E"
+ "\x93\x03\xFA\xFF\xCE\xAF\xB2\x3C"
+ "\x6F\xEB\x57\xFB\xD3\x28\x87\xA9"
+ "\xCE\xC2\xF5\x9C\xC6\x67\xB5\x97"
+ "\x49\xF7\x04\xCB\xEF\x84\x98\x33"
+ "\xAF\x38\xD3\x04\x1C\x24\x71\x38"
+ "\xC7\x71\xDD\x43\x0D\x12\x4A\x18"
+ "\xBA\xC4\xAF\xBA\xB2\x5B\xEB\x95"
+ "\x02\x43\x5D\xCE\x19\xCC\xCD\x66"
+ "\x91\x0B\x8C\x7F\x51\xC4\xBF\x3C"
+ "\x8B\xF1\xCC\xAA\x29\xD7\x87\xCB"
+ "\x3E\xC5\xF3\xC9\x75\xE8\xA3\x5B"
+ "\x30\x45\xA9\xB7\xAF\x80\x64\x6F"
+ "\x75\x4A\xA7\xC0\x6D\x19\x6B\xDE"
+ "\x17\xDE\x6D\xEA\x87\x9F\x95\xAE"
+ "\xF5\x3C\xEE\x54\xB8\x27\x84\xF8"
+ "\x97\xA3\xE1\x6F\x38\x24\x34\x88"
+ "\xCE\xBD\x32\x52\xE0\x00\x6C\x94"
+ "\xC9\xD7\x5D\x37\x81\x33\x2E\x7F"
+ "\x4F\x7E\x2E\x0D\x94\xBD\xEA\x59"
+ "\x34\x39\xA8\x35\x12\xB7\xBC\xAC"
+ "\xEA\x52\x9C\x78\x02\x6D\x92\x36"
+ "\xFB\x59\x2B\xA4\xEA\x7B\x1B\x83"
+ "\xE1\x4D\x5E\x2A\x7E\x92\xB1\x64"
+ "\xDE\xE0\x27\x4B\x0A\x6F\x4C\xE3"
+ "\xB0\xEB\x31\xE4\x69\x95\xAB\x35"
+ "\x8B\x2C\xF5\x6B\x7F\xF1\xA2\x82"
+ "\xF8\xD9\x47\x82\xA9\x82\x03\x91"
+ "\x69\x1F\xBE\x4C\xE7\xC7\x34\x2F"
+ "\x45\x72\x80\x17\x81\xBD\x9D\x62"
+ "\xA1\xAC\xE8\xCF\xC6\x74\xCF\xDC"
+ "\x22\x60\x4E\xE8\xA4\x5D\x85\xB9",
+ .ilen = 512,
+ .result = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
},
};
@@ -2828,20 +13550,37 @@ static struct cipher_testvec cast6_dec_tv_template[] = {
/*
* AES test vectors.
*/
-#define AES_ENC_TEST_VECTORS 3
-#define AES_DEC_TEST_VECTORS 3
-#define AES_CBC_ENC_TEST_VECTORS 4
-#define AES_CBC_DEC_TEST_VECTORS 4
+#define AES_ENC_TEST_VECTORS 4
+#define AES_DEC_TEST_VECTORS 4
+#define AES_CBC_ENC_TEST_VECTORS 5
+#define AES_CBC_DEC_TEST_VECTORS 5
+#define HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS 2
+#define HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS 2
+#define HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC 2
+#define HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC 2
+#define HMAC_SHA1_AES_CBC_ENC_TEST_VEC 7
+#define HMAC_SHA256_AES_CBC_ENC_TEST_VEC 7
+#define HMAC_SHA512_AES_CBC_ENC_TEST_VEC 7
#define AES_LRW_ENC_TEST_VECTORS 8
#define AES_LRW_DEC_TEST_VECTORS 8
-#define AES_XTS_ENC_TEST_VECTORS 4
-#define AES_XTS_DEC_TEST_VECTORS 4
-#define AES_CTR_ENC_TEST_VECTORS 7
-#define AES_CTR_DEC_TEST_VECTORS 6
+#define AES_XTS_ENC_TEST_VECTORS 5
+#define AES_XTS_DEC_TEST_VECTORS 5
+#define AES_CTR_ENC_TEST_VECTORS 5
+#define AES_CTR_DEC_TEST_VECTORS 5
+#define AES_OFB_ENC_TEST_VECTORS 1
+#define AES_OFB_DEC_TEST_VECTORS 1
+#define AES_CTR_3686_ENC_TEST_VECTORS 7
+#define AES_CTR_3686_DEC_TEST_VECTORS 6
#define AES_GCM_ENC_TEST_VECTORS 9
#define AES_GCM_DEC_TEST_VECTORS 8
-#define AES_CCM_ENC_TEST_VECTORS 7
+#define AES_GCM_4106_ENC_TEST_VECTORS 7
+#define AES_GCM_4106_DEC_TEST_VECTORS 7
+#define AES_GCM_4543_ENC_TEST_VECTORS 1
+#define AES_GCM_4543_DEC_TEST_VECTORS 2
+#define AES_CCM_ENC_TEST_VECTORS 8
#define AES_CCM_DEC_TEST_VECTORS 7
+#define AES_CCM_4309_ENC_TEST_VECTORS 7
+#define AES_CCM_4309_DEC_TEST_VECTORS 10
static struct cipher_testvec aes_enc_tv_template[] = {
{ /* From FIPS-197 */
@@ -2877,6 +13616,141 @@ static struct cipher_testvec aes_enc_tv_template[] = {
.result = "\x8e\xa2\xb7\xca\x51\x67\x45\xbf"
"\xea\xfc\x49\x90\x4b\x49\x60\x89",
.rlen = 16,
+ }, { /* Generated with Crypto++ */
+ .key = "\xA6\xC9\x83\xA6\xC9\xEC\x0F\x32"
+ "\x55\x0F\x32\x55\x78\x9B\xBE\x78"
+ "\x9B\xBE\xE1\x04\x27\xE1\x04\x27"
+ "\x4A\x6D\x90\x4A\x6D\x90\xB3\xD6",
+ .klen = 32,
+ .input = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB"
+ "\x54\xE0\x49\xB2\x1B\xA7\x10\x79"
+ "\x05\x6E\xD7\x40\xCC\x35\x9E\x07"
+ "\x93\xFC\x65\xF1\x5A\xC3\x2C\xB8"
+ "\x21\x8A\x16\x7F\xE8\x51\xDD\x46"
+ "\xAF\x18\xA4\x0D\x76\x02\x6B\xD4"
+ "\x3D\xC9\x32\x9B\x04\x90\xF9\x62"
+ "\xEE\x57\xC0\x29\xB5\x1E\x87\x13"
+ "\x7C\xE5\x4E\xDA\x43\xAC\x15\xA1"
+ "\x0A\x73\xFF\x68\xD1\x3A\xC6\x2F"
+ "\x98\x01\x8D\xF6\x5F\xEB\x54\xBD"
+ "\x26\xB2\x1B\x84\x10\x79\xE2\x4B"
+ "\xD7\x40\xA9\x12\x9E\x07\x70\xFC"
+ "\x65\xCE\x37\xC3\x2C\x95\x21\x8A"
+ "\xF3\x5C\xE8\x51\xBA\x23\xAF\x18"
+ "\x81\x0D\x76\xDF\x48\xD4\x3D\xA6"
+ "\x0F\x9B\x04\x6D\xF9\x62\xCB\x34"
+ "\xC0\x29\x92\x1E\x87\xF0\x59\xE5"
+ "\x4E\xB7\x20\xAC\x15\x7E\x0A\x73"
+ "\xDC\x45\xD1\x3A\xA3\x0C\x98\x01"
+ "\x6A\xF6\x5F\xC8\x31\xBD\x26\x8F"
+ "\x1B\x84\xED\x56\xE2\x4B\xB4\x1D"
+ "\xA9\x12\x7B\x07\x70\xD9\x42\xCE"
+ "\x37\xA0\x09\x95\xFE\x67\xF3\x5C"
+ "\xC5\x2E\xBA\x23\x8C\x18\x81\xEA"
+ "\x53\xDF\x48\xB1\x1A\xA6\x0F\x78"
+ "\x04\x6D\xD6\x3F\xCB\x34\x9D\x06"
+ "\x92\xFB\x64\xF0\x59\xC2\x2B\xB7"
+ "\x20\x89\x15\x7E\xE7\x50\xDC\x45"
+ "\xAE\x17\xA3\x0C\x75\x01\x6A\xD3"
+ "\x3C\xC8\x31\x9A\x03\x8F\xF8\x61"
+ "\xED\x56\xBF\x28\xB4\x1D\x86\x12",
+ .ilen = 496,
+ .result = "\x71\x73\xF7\xDB\x24\x93\x21\x6D"
+ "\x61\x1E\xBB\x63\x42\x79\xDB\x64"
+ "\x6F\x82\xC0\xCA\xA3\x9B\xFA\x0B"
+ "\xD9\x08\xC7\x4A\x90\xAE\x8F\x5F"
+ "\x5E\x06\xF0\x5F\x31\x51\x18\x37"
+ "\x45\xD7\xCA\x3A\xFD\x6C\x3F\xE1"
+ "\xDD\x8D\x22\x65\x2B\x00\x50\xCE"
+ "\xBA\x28\x67\xD7\xCE\x0E\x0D\xEA"
+ "\x78\x69\x7F\xAE\x8F\x8B\x69\x37"
+ "\x75\xE0\xDC\x96\xE0\xB7\xF4\x09"
+ "\xCB\x6D\xA2\xFB\xDA\xAF\x09\xF8"
+ "\x81\x82\x27\xFA\x45\x9C\x29\xA4"
+ "\x22\x8B\x78\x69\x5B\x46\xF9\x39"
+ "\x1B\xCC\xF9\x1D\x09\xEB\xBC\x5C"
+ "\x41\x72\x51\x97\x1D\x07\x49\xA0"
+ "\x1B\x8E\x65\x4B\xB2\x6A\x12\x03"
+ "\x6A\x60\x95\xAC\xBD\xAC\x1A\x64"
+ "\xDE\x5A\xA5\xF0\x83\x2F\xCB\xCA"
+ "\x22\x74\xA6\x6C\x9B\x73\xCE\x3F"
+ "\xE1\x8B\x22\x17\x59\x0C\x47\x89"
+ "\x33\xA1\xD6\x47\x03\x19\x4F\xA8"
+ "\x67\x69\xF0\x5B\xF0\x20\xAD\x06"
+ "\x27\x81\x92\xD8\xC5\xBA\x98\x12"
+ "\xBE\x24\xB5\x2F\x75\x02\xC2\xAD"
+ "\x12\x2F\x07\x32\xEE\x39\xAF\x64"
+ "\x05\x8F\xB3\xD4\xEB\x1B\x46\x6E"
+ "\xD9\x21\xF9\xC4\xB7\xC9\x45\x68"
+ "\xB4\xA1\x74\x9F\x82\x47\xEB\xCC"
+ "\xBD\x0A\x14\x95\x0F\x8B\xA8\x2F"
+ "\x4B\x1B\xA7\xBF\x82\xA6\x43\x0C"
+ "\xB9\x39\x4A\xA8\x10\x6F\x50\x7B"
+ "\x25\xFB\x26\x81\xE0\x2F\xF0\x96"
+ "\x8D\x8B\xAC\x92\x0F\xF6\xED\x64"
+ "\x63\x29\x4C\x8E\x18\x13\xC5\xBF"
+ "\xFC\xA0\xD9\xBF\x7C\x3A\x0E\x29"
+ "\x6F\xD1\x6C\x6F\xA5\xDA\xBF\xB1"
+ "\x30\xEA\x44\x2D\xC3\x8F\x16\xE1"
+ "\x66\xFA\xA3\x21\x3E\xFC\x13\xCA"
+ "\xF0\xF6\xF0\x59\xBD\x8F\x38\x50"
+ "\x31\xCB\x69\x3F\x96\x15\xD6\xF5"
+ "\xAE\xFF\xF6\xAA\x41\x85\x4C\x10"
+ "\x58\xE3\xF9\x44\xE6\x28\xDA\x9A"
+ "\xDC\x6A\x80\x34\x73\x97\x1B\xC5"
+ "\xCA\x26\x16\x77\x0E\x60\xAB\x89"
+ "\x0F\x04\x27\xBD\xCE\x3E\x71\xB4"
+ "\xA0\xD7\x22\x7E\xDB\xEB\x24\x70"
+ "\x42\x71\x51\x78\x70\xB3\xE0\x3D"
+ "\x84\x8E\x8D\x7B\xD0\x6D\xEA\x92"
+ "\x11\x08\x42\x4F\xE5\xAD\x26\x92"
+ "\xD2\x00\xAE\xA8\xE3\x4B\x37\x47"
+ "\x22\xC1\x95\xC1\x63\x7F\xCB\x03"
+ "\xF3\xE3\xD7\x9D\x60\xC7\xBC\xEA"
+ "\x35\xA2\xFD\x45\x52\x39\x13\x6F"
+ "\xC1\x53\xF3\x53\xDF\x33\x84\xD7"
+ "\xD2\xC8\x37\xB0\x75\xE3\x41\x46"
+ "\xB3\xC7\x83\x2E\x8A\xBB\xA4\xE5"
+ "\x7F\x3C\xFD\x8B\xEB\xEA\x63\xBD"
+ "\xB7\x46\xE7\xBF\x09\x9C\x0D\x0F"
+ "\x40\x86\x7F\x51\xE1\x11\x9C\xCB"
+ "\x88\xE6\x68\x47\xE3\x2B\xC5\xFF"
+ "\x09\x79\xA0\x43\x5C\x0D\x08\x58"
+ "\x17\xBB\xC0\x6B\x62\x3F\x56\xE9",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -2914,6 +13788,141 @@ static struct cipher_testvec aes_dec_tv_template[] = {
.result = "\x00\x11\x22\x33\x44\x55\x66\x77"
"\x88\x99\xaa\xbb\xcc\xdd\xee\xff",
.rlen = 16,
+ }, { /* Generated with Crypto++ */
+ .key = "\xA6\xC9\x83\xA6\xC9\xEC\x0F\x32"
+ "\x55\x0F\x32\x55\x78\x9B\xBE\x78"
+ "\x9B\xBE\xE1\x04\x27\xE1\x04\x27"
+ "\x4A\x6D\x90\x4A\x6D\x90\xB3\xD6",
+ .klen = 32,
+ .input = "\x71\x73\xF7\xDB\x24\x93\x21\x6D"
+ "\x61\x1E\xBB\x63\x42\x79\xDB\x64"
+ "\x6F\x82\xC0\xCA\xA3\x9B\xFA\x0B"
+ "\xD9\x08\xC7\x4A\x90\xAE\x8F\x5F"
+ "\x5E\x06\xF0\x5F\x31\x51\x18\x37"
+ "\x45\xD7\xCA\x3A\xFD\x6C\x3F\xE1"
+ "\xDD\x8D\x22\x65\x2B\x00\x50\xCE"
+ "\xBA\x28\x67\xD7\xCE\x0E\x0D\xEA"
+ "\x78\x69\x7F\xAE\x8F\x8B\x69\x37"
+ "\x75\xE0\xDC\x96\xE0\xB7\xF4\x09"
+ "\xCB\x6D\xA2\xFB\xDA\xAF\x09\xF8"
+ "\x81\x82\x27\xFA\x45\x9C\x29\xA4"
+ "\x22\x8B\x78\x69\x5B\x46\xF9\x39"
+ "\x1B\xCC\xF9\x1D\x09\xEB\xBC\x5C"
+ "\x41\x72\x51\x97\x1D\x07\x49\xA0"
+ "\x1B\x8E\x65\x4B\xB2\x6A\x12\x03"
+ "\x6A\x60\x95\xAC\xBD\xAC\x1A\x64"
+ "\xDE\x5A\xA5\xF0\x83\x2F\xCB\xCA"
+ "\x22\x74\xA6\x6C\x9B\x73\xCE\x3F"
+ "\xE1\x8B\x22\x17\x59\x0C\x47\x89"
+ "\x33\xA1\xD6\x47\x03\x19\x4F\xA8"
+ "\x67\x69\xF0\x5B\xF0\x20\xAD\x06"
+ "\x27\x81\x92\xD8\xC5\xBA\x98\x12"
+ "\xBE\x24\xB5\x2F\x75\x02\xC2\xAD"
+ "\x12\x2F\x07\x32\xEE\x39\xAF\x64"
+ "\x05\x8F\xB3\xD4\xEB\x1B\x46\x6E"
+ "\xD9\x21\xF9\xC4\xB7\xC9\x45\x68"
+ "\xB4\xA1\x74\x9F\x82\x47\xEB\xCC"
+ "\xBD\x0A\x14\x95\x0F\x8B\xA8\x2F"
+ "\x4B\x1B\xA7\xBF\x82\xA6\x43\x0C"
+ "\xB9\x39\x4A\xA8\x10\x6F\x50\x7B"
+ "\x25\xFB\x26\x81\xE0\x2F\xF0\x96"
+ "\x8D\x8B\xAC\x92\x0F\xF6\xED\x64"
+ "\x63\x29\x4C\x8E\x18\x13\xC5\xBF"
+ "\xFC\xA0\xD9\xBF\x7C\x3A\x0E\x29"
+ "\x6F\xD1\x6C\x6F\xA5\xDA\xBF\xB1"
+ "\x30\xEA\x44\x2D\xC3\x8F\x16\xE1"
+ "\x66\xFA\xA3\x21\x3E\xFC\x13\xCA"
+ "\xF0\xF6\xF0\x59\xBD\x8F\x38\x50"
+ "\x31\xCB\x69\x3F\x96\x15\xD6\xF5"
+ "\xAE\xFF\xF6\xAA\x41\x85\x4C\x10"
+ "\x58\xE3\xF9\x44\xE6\x28\xDA\x9A"
+ "\xDC\x6A\x80\x34\x73\x97\x1B\xC5"
+ "\xCA\x26\x16\x77\x0E\x60\xAB\x89"
+ "\x0F\x04\x27\xBD\xCE\x3E\x71\xB4"
+ "\xA0\xD7\x22\x7E\xDB\xEB\x24\x70"
+ "\x42\x71\x51\x78\x70\xB3\xE0\x3D"
+ "\x84\x8E\x8D\x7B\xD0\x6D\xEA\x92"
+ "\x11\x08\x42\x4F\xE5\xAD\x26\x92"
+ "\xD2\x00\xAE\xA8\xE3\x4B\x37\x47"
+ "\x22\xC1\x95\xC1\x63\x7F\xCB\x03"
+ "\xF3\xE3\xD7\x9D\x60\xC7\xBC\xEA"
+ "\x35\xA2\xFD\x45\x52\x39\x13\x6F"
+ "\xC1\x53\xF3\x53\xDF\x33\x84\xD7"
+ "\xD2\xC8\x37\xB0\x75\xE3\x41\x46"
+ "\xB3\xC7\x83\x2E\x8A\xBB\xA4\xE5"
+ "\x7F\x3C\xFD\x8B\xEB\xEA\x63\xBD"
+ "\xB7\x46\xE7\xBF\x09\x9C\x0D\x0F"
+ "\x40\x86\x7F\x51\xE1\x11\x9C\xCB"
+ "\x88\xE6\x68\x47\xE3\x2B\xC5\xFF"
+ "\x09\x79\xA0\x43\x5C\x0D\x08\x58"
+ "\x17\xBB\xC0\x6B\x62\x3F\x56\xE9",
+ .ilen = 496,
+ .result = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB"
+ "\x54\xE0\x49\xB2\x1B\xA7\x10\x79"
+ "\x05\x6E\xD7\x40\xCC\x35\x9E\x07"
+ "\x93\xFC\x65\xF1\x5A\xC3\x2C\xB8"
+ "\x21\x8A\x16\x7F\xE8\x51\xDD\x46"
+ "\xAF\x18\xA4\x0D\x76\x02\x6B\xD4"
+ "\x3D\xC9\x32\x9B\x04\x90\xF9\x62"
+ "\xEE\x57\xC0\x29\xB5\x1E\x87\x13"
+ "\x7C\xE5\x4E\xDA\x43\xAC\x15\xA1"
+ "\x0A\x73\xFF\x68\xD1\x3A\xC6\x2F"
+ "\x98\x01\x8D\xF6\x5F\xEB\x54\xBD"
+ "\x26\xB2\x1B\x84\x10\x79\xE2\x4B"
+ "\xD7\x40\xA9\x12\x9E\x07\x70\xFC"
+ "\x65\xCE\x37\xC3\x2C\x95\x21\x8A"
+ "\xF3\x5C\xE8\x51\xBA\x23\xAF\x18"
+ "\x81\x0D\x76\xDF\x48\xD4\x3D\xA6"
+ "\x0F\x9B\x04\x6D\xF9\x62\xCB\x34"
+ "\xC0\x29\x92\x1E\x87\xF0\x59\xE5"
+ "\x4E\xB7\x20\xAC\x15\x7E\x0A\x73"
+ "\xDC\x45\xD1\x3A\xA3\x0C\x98\x01"
+ "\x6A\xF6\x5F\xC8\x31\xBD\x26\x8F"
+ "\x1B\x84\xED\x56\xE2\x4B\xB4\x1D"
+ "\xA9\x12\x7B\x07\x70\xD9\x42\xCE"
+ "\x37\xA0\x09\x95\xFE\x67\xF3\x5C"
+ "\xC5\x2E\xBA\x23\x8C\x18\x81\xEA"
+ "\x53\xDF\x48\xB1\x1A\xA6\x0F\x78"
+ "\x04\x6D\xD6\x3F\xCB\x34\x9D\x06"
+ "\x92\xFB\x64\xF0\x59\xC2\x2B\xB7"
+ "\x20\x89\x15\x7E\xE7\x50\xDC\x45"
+ "\xAE\x17\xA3\x0C\x75\x01\x6A\xD3"
+ "\x3C\xC8\x31\x9A\x03\x8F\xF8\x61"
+ "\xED\x56\xBF\x28\xB4\x1D\x86\x12",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -2996,6 +14005,143 @@ static struct cipher_testvec aes_cbc_enc_tv_template[] = {
"\xb2\xeb\x05\xe2\xc3\x9b\xe9\xfc"
"\xda\x6c\x19\x07\x8c\x6a\x9d\x1b",
.rlen = 64,
+ }, { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55"
+ "\x0F\x32\x55\x78\x9B\xBE\x78\x9B"
+ "\xBE\xE1\x04\x27\xE1\x04\x27\x4A"
+ "\x6D\x90\x4A\x6D\x90\xB3\xD6\xF9",
+ .klen = 32,
+ .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47"
+ "\xE2\x7D\x18\xD6\x71\x0C\xA7\x42",
+ .input = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB"
+ "\x54\xE0\x49\xB2\x1B\xA7\x10\x79"
+ "\x05\x6E\xD7\x40\xCC\x35\x9E\x07"
+ "\x93\xFC\x65\xF1\x5A\xC3\x2C\xB8"
+ "\x21\x8A\x16\x7F\xE8\x51\xDD\x46"
+ "\xAF\x18\xA4\x0D\x76\x02\x6B\xD4"
+ "\x3D\xC9\x32\x9B\x04\x90\xF9\x62"
+ "\xEE\x57\xC0\x29\xB5\x1E\x87\x13"
+ "\x7C\xE5\x4E\xDA\x43\xAC\x15\xA1"
+ "\x0A\x73\xFF\x68\xD1\x3A\xC6\x2F"
+ "\x98\x01\x8D\xF6\x5F\xEB\x54\xBD"
+ "\x26\xB2\x1B\x84\x10\x79\xE2\x4B"
+ "\xD7\x40\xA9\x12\x9E\x07\x70\xFC"
+ "\x65\xCE\x37\xC3\x2C\x95\x21\x8A"
+ "\xF3\x5C\xE8\x51\xBA\x23\xAF\x18"
+ "\x81\x0D\x76\xDF\x48\xD4\x3D\xA6"
+ "\x0F\x9B\x04\x6D\xF9\x62\xCB\x34"
+ "\xC0\x29\x92\x1E\x87\xF0\x59\xE5"
+ "\x4E\xB7\x20\xAC\x15\x7E\x0A\x73"
+ "\xDC\x45\xD1\x3A\xA3\x0C\x98\x01"
+ "\x6A\xF6\x5F\xC8\x31\xBD\x26\x8F"
+ "\x1B\x84\xED\x56\xE2\x4B\xB4\x1D"
+ "\xA9\x12\x7B\x07\x70\xD9\x42\xCE"
+ "\x37\xA0\x09\x95\xFE\x67\xF3\x5C"
+ "\xC5\x2E\xBA\x23\x8C\x18\x81\xEA"
+ "\x53\xDF\x48\xB1\x1A\xA6\x0F\x78"
+ "\x04\x6D\xD6\x3F\xCB\x34\x9D\x06"
+ "\x92\xFB\x64\xF0\x59\xC2\x2B\xB7"
+ "\x20\x89\x15\x7E\xE7\x50\xDC\x45"
+ "\xAE\x17\xA3\x0C\x75\x01\x6A\xD3"
+ "\x3C\xC8\x31\x9A\x03\x8F\xF8\x61"
+ "\xED\x56\xBF\x28\xB4\x1D\x86\x12",
+ .ilen = 496,
+ .result = "\xEA\x65\x8A\x19\xB0\x66\xC1\x3F"
+ "\xCE\xF1\x97\x75\xC1\xFD\xB5\xAF"
+ "\x52\x65\xF7\xFF\xBC\xD8\x2D\x9F"
+ "\x2F\xB9\x26\x9B\x6F\x10\xB7\xB8"
+ "\x26\xA1\x02\x46\xA2\xAD\xC6\xC0"
+ "\x11\x15\xFF\x6D\x1E\x82\x04\xA6"
+ "\xB1\x74\xD1\x08\x13\xFD\x90\x7C"
+ "\xF5\xED\xD3\xDB\x5A\x0A\x0C\x2F"
+ "\x0A\x70\xF1\x88\x07\xCF\x21\x26"
+ "\x40\x40\x8A\xF5\x53\xF7\x24\x4F"
+ "\x83\x38\x43\x5F\x08\x99\xEB\xE3"
+ "\xDC\x02\x64\x67\x50\x6E\x15\xC3"
+ "\x01\x1A\xA0\x81\x13\x65\xA6\x73"
+ "\x71\xA6\x3B\x91\x83\x77\xBE\xFA"
+ "\xDB\x71\x73\xA6\xC1\xAE\x43\xC3"
+ "\x36\xCE\xD6\xEB\xF9\x30\x1C\x4F"
+ "\x80\x38\x5E\x9C\x6E\xAB\x98\x2F"
+ "\x53\xAF\xCF\xC8\x9A\xB8\x86\x43"
+ "\x3E\x86\xE7\xA1\xF4\x2F\x30\x40"
+ "\x03\xA8\x6C\x50\x42\x9F\x77\x59"
+ "\x89\xA0\xC5\xEC\x9A\xB8\xDD\x99"
+ "\x16\x24\x02\x07\x48\xAE\xF2\x31"
+ "\x34\x0E\xC3\x85\xFE\x1C\x95\x99"
+ "\x87\x58\x98\x8B\xE7\xC6\xC5\x70"
+ "\x73\x81\x07\x7C\x56\x2F\xD8\x1B"
+ "\xB7\xB9\x2B\xAB\xE3\x01\x87\x0F"
+ "\xD8\xBB\xC0\x0D\xAC\x2C\x2F\x98"
+ "\x3C\x0B\xA2\x99\x4A\x8C\xF7\x04"
+ "\xE0\xE0\xCF\xD1\x81\x5B\xFE\xF5"
+ "\x24\x04\xFD\xB8\xDF\x13\xD8\xCD"
+ "\xF1\xE3\x3D\x98\x50\x02\x77\x9E"
+ "\xBC\x22\xAB\xFA\xC2\x43\x1F\x66"
+ "\x20\x02\x23\xDA\xDF\xA0\x89\xF6"
+ "\xD8\xF3\x45\x24\x53\x6F\x16\x77"
+ "\x02\x3E\x7B\x36\x5F\xA0\x3B\x78"
+ "\x63\xA2\xBD\xB5\xA4\xCA\x1E\xD3"
+ "\x57\xBC\x0B\x9F\x43\x51\x28\x4F"
+ "\x07\x50\x6C\x68\x12\x07\xCF\xFA"
+ "\x6B\x72\x0B\xEB\xF8\x88\x90\x2C"
+ "\x7E\xF5\x91\xD1\x03\xD8\xD5\xBD"
+ "\x22\x39\x7B\x16\x03\x01\x69\xAF"
+ "\x3D\x38\x66\x28\x0C\xBE\x5B\xC5"
+ "\x03\xB4\x2F\x51\x8A\x56\x17\x2B"
+ "\x88\x42\x6D\x40\x68\x8F\xD0\x11"
+ "\x19\xF9\x1F\x43\x79\x95\x31\xFA"
+ "\x28\x7A\x3D\xF7\x66\xEB\xEF\xAC"
+ "\x06\xB2\x01\xAD\xDB\x68\xDB\xEC"
+ "\x8D\x53\x6E\x72\x68\xA3\xC7\x63"
+ "\x43\x2B\x78\xE0\x04\x29\x8F\x72"
+ "\xB2\x2C\xE6\x84\x03\x30\x6D\xCD"
+ "\x26\x92\x37\xE1\x2F\xBB\x8B\x9D"
+ "\xE4\x4C\xF6\x93\xBC\xD9\xAD\x44"
+ "\x52\x65\xC7\xB0\x0E\x3F\x0E\x61"
+ "\x56\x5D\x1C\x6D\xA7\x05\x2E\xBC"
+ "\x58\x08\x15\xAB\x12\xAB\x17\x4A"
+ "\x5E\x1C\xF2\xCD\xB8\xA2\xAE\xFB"
+ "\x9B\x2E\x0E\x85\x34\x80\x0E\x3F"
+ "\x4C\xB8\xDB\xCE\x1C\x90\xA1\x61"
+ "\x6C\x69\x09\x35\x9E\xD4\xF4\xAD"
+ "\xBC\x06\x41\xE3\x01\xB4\x4E\x0A"
+ "\xE0\x1F\x91\xF8\x82\x96\x2D\x65"
+ "\xA3\xAA\x13\xCC\x50\xFF\x7B\x02",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -3078,6 +14224,1796 @@ static struct cipher_testvec aes_cbc_dec_tv_template[] = {
"\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
"\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
.rlen = 64,
+ }, { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55"
+ "\x0F\x32\x55\x78\x9B\xBE\x78\x9B"
+ "\xBE\xE1\x04\x27\xE1\x04\x27\x4A"
+ "\x6D\x90\x4A\x6D\x90\xB3\xD6\xF9",
+ .klen = 32,
+ .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47"
+ "\xE2\x7D\x18\xD6\x71\x0C\xA7\x42",
+ .input = "\xEA\x65\x8A\x19\xB0\x66\xC1\x3F"
+ "\xCE\xF1\x97\x75\xC1\xFD\xB5\xAF"
+ "\x52\x65\xF7\xFF\xBC\xD8\x2D\x9F"
+ "\x2F\xB9\x26\x9B\x6F\x10\xB7\xB8"
+ "\x26\xA1\x02\x46\xA2\xAD\xC6\xC0"
+ "\x11\x15\xFF\x6D\x1E\x82\x04\xA6"
+ "\xB1\x74\xD1\x08\x13\xFD\x90\x7C"
+ "\xF5\xED\xD3\xDB\x5A\x0A\x0C\x2F"
+ "\x0A\x70\xF1\x88\x07\xCF\x21\x26"
+ "\x40\x40\x8A\xF5\x53\xF7\x24\x4F"
+ "\x83\x38\x43\x5F\x08\x99\xEB\xE3"
+ "\xDC\x02\x64\x67\x50\x6E\x15\xC3"
+ "\x01\x1A\xA0\x81\x13\x65\xA6\x73"
+ "\x71\xA6\x3B\x91\x83\x77\xBE\xFA"
+ "\xDB\x71\x73\xA6\xC1\xAE\x43\xC3"
+ "\x36\xCE\xD6\xEB\xF9\x30\x1C\x4F"
+ "\x80\x38\x5E\x9C\x6E\xAB\x98\x2F"
+ "\x53\xAF\xCF\xC8\x9A\xB8\x86\x43"
+ "\x3E\x86\xE7\xA1\xF4\x2F\x30\x40"
+ "\x03\xA8\x6C\x50\x42\x9F\x77\x59"
+ "\x89\xA0\xC5\xEC\x9A\xB8\xDD\x99"
+ "\x16\x24\x02\x07\x48\xAE\xF2\x31"
+ "\x34\x0E\xC3\x85\xFE\x1C\x95\x99"
+ "\x87\x58\x98\x8B\xE7\xC6\xC5\x70"
+ "\x73\x81\x07\x7C\x56\x2F\xD8\x1B"
+ "\xB7\xB9\x2B\xAB\xE3\x01\x87\x0F"
+ "\xD8\xBB\xC0\x0D\xAC\x2C\x2F\x98"
+ "\x3C\x0B\xA2\x99\x4A\x8C\xF7\x04"
+ "\xE0\xE0\xCF\xD1\x81\x5B\xFE\xF5"
+ "\x24\x04\xFD\xB8\xDF\x13\xD8\xCD"
+ "\xF1\xE3\x3D\x98\x50\x02\x77\x9E"
+ "\xBC\x22\xAB\xFA\xC2\x43\x1F\x66"
+ "\x20\x02\x23\xDA\xDF\xA0\x89\xF6"
+ "\xD8\xF3\x45\x24\x53\x6F\x16\x77"
+ "\x02\x3E\x7B\x36\x5F\xA0\x3B\x78"
+ "\x63\xA2\xBD\xB5\xA4\xCA\x1E\xD3"
+ "\x57\xBC\x0B\x9F\x43\x51\x28\x4F"
+ "\x07\x50\x6C\x68\x12\x07\xCF\xFA"
+ "\x6B\x72\x0B\xEB\xF8\x88\x90\x2C"
+ "\x7E\xF5\x91\xD1\x03\xD8\xD5\xBD"
+ "\x22\x39\x7B\x16\x03\x01\x69\xAF"
+ "\x3D\x38\x66\x28\x0C\xBE\x5B\xC5"
+ "\x03\xB4\x2F\x51\x8A\x56\x17\x2B"
+ "\x88\x42\x6D\x40\x68\x8F\xD0\x11"
+ "\x19\xF9\x1F\x43\x79\x95\x31\xFA"
+ "\x28\x7A\x3D\xF7\x66\xEB\xEF\xAC"
+ "\x06\xB2\x01\xAD\xDB\x68\xDB\xEC"
+ "\x8D\x53\x6E\x72\x68\xA3\xC7\x63"
+ "\x43\x2B\x78\xE0\x04\x29\x8F\x72"
+ "\xB2\x2C\xE6\x84\x03\x30\x6D\xCD"
+ "\x26\x92\x37\xE1\x2F\xBB\x8B\x9D"
+ "\xE4\x4C\xF6\x93\xBC\xD9\xAD\x44"
+ "\x52\x65\xC7\xB0\x0E\x3F\x0E\x61"
+ "\x56\x5D\x1C\x6D\xA7\x05\x2E\xBC"
+ "\x58\x08\x15\xAB\x12\xAB\x17\x4A"
+ "\x5E\x1C\xF2\xCD\xB8\xA2\xAE\xFB"
+ "\x9B\x2E\x0E\x85\x34\x80\x0E\x3F"
+ "\x4C\xB8\xDB\xCE\x1C\x90\xA1\x61"
+ "\x6C\x69\x09\x35\x9E\xD4\xF4\xAD"
+ "\xBC\x06\x41\xE3\x01\xB4\x4E\x0A"
+ "\xE0\x1F\x91\xF8\x82\x96\x2D\x65"
+ "\xA3\xAA\x13\xCC\x50\xFF\x7B\x02",
+ .ilen = 496,
+ .result = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB"
+ "\x54\xE0\x49\xB2\x1B\xA7\x10\x79"
+ "\x05\x6E\xD7\x40\xCC\x35\x9E\x07"
+ "\x93\xFC\x65\xF1\x5A\xC3\x2C\xB8"
+ "\x21\x8A\x16\x7F\xE8\x51\xDD\x46"
+ "\xAF\x18\xA4\x0D\x76\x02\x6B\xD4"
+ "\x3D\xC9\x32\x9B\x04\x90\xF9\x62"
+ "\xEE\x57\xC0\x29\xB5\x1E\x87\x13"
+ "\x7C\xE5\x4E\xDA\x43\xAC\x15\xA1"
+ "\x0A\x73\xFF\x68\xD1\x3A\xC6\x2F"
+ "\x98\x01\x8D\xF6\x5F\xEB\x54\xBD"
+ "\x26\xB2\x1B\x84\x10\x79\xE2\x4B"
+ "\xD7\x40\xA9\x12\x9E\x07\x70\xFC"
+ "\x65\xCE\x37\xC3\x2C\x95\x21\x8A"
+ "\xF3\x5C\xE8\x51\xBA\x23\xAF\x18"
+ "\x81\x0D\x76\xDF\x48\xD4\x3D\xA6"
+ "\x0F\x9B\x04\x6D\xF9\x62\xCB\x34"
+ "\xC0\x29\x92\x1E\x87\xF0\x59\xE5"
+ "\x4E\xB7\x20\xAC\x15\x7E\x0A\x73"
+ "\xDC\x45\xD1\x3A\xA3\x0C\x98\x01"
+ "\x6A\xF6\x5F\xC8\x31\xBD\x26\x8F"
+ "\x1B\x84\xED\x56\xE2\x4B\xB4\x1D"
+ "\xA9\x12\x7B\x07\x70\xD9\x42\xCE"
+ "\x37\xA0\x09\x95\xFE\x67\xF3\x5C"
+ "\xC5\x2E\xBA\x23\x8C\x18\x81\xEA"
+ "\x53\xDF\x48\xB1\x1A\xA6\x0F\x78"
+ "\x04\x6D\xD6\x3F\xCB\x34\x9D\x06"
+ "\x92\xFB\x64\xF0\x59\xC2\x2B\xB7"
+ "\x20\x89\x15\x7E\xE7\x50\xDC\x45"
+ "\xAE\x17\xA3\x0C\x75\x01\x6A\xD3"
+ "\x3C\xC8\x31\x9A\x03\x8F\xF8\x61"
+ "\xED\x56\xBF\x28\xB4\x1D\x86\x12",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct aead_testvec hmac_md5_ecb_cipher_null_enc_tv_template[] = {
+ { /* Input data from RFC 2410 Case 1 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x00" /* enc key length */
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .klen = 8 + 16 + 0,
+ .iv = "",
+ .input = "\x01\x23\x45\x67\x89\xab\xcd\xef",
+ .ilen = 8,
+ .result = "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\xaa\x42\xfe\x43\x8d\xea\xa3\x5a"
+ "\xb9\x3d\x9f\xb1\xa3\x8e\x9b\xae",
+ .rlen = 8 + 16,
+ }, { /* Input data from RFC 2410 Case 2 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x00" /* enc key length */
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .klen = 8 + 16 + 0,
+ .iv = "",
+ .input = "Network Security People Have A Strange Sense Of Humor",
+ .ilen = 53,
+ .result = "Network Security People Have A Strange Sense Of Humor"
+ "\x73\xa5\x3e\x1c\x08\x0e\x8a\x8a"
+ "\x8e\xb5\x5f\x90\x8e\xfe\x13\x23",
+ .rlen = 53 + 16,
+ },
+};
+
+static struct aead_testvec hmac_md5_ecb_cipher_null_dec_tv_template[] = {
+ {
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x00" /* enc key length */
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .klen = 8 + 16 + 0,
+ .iv = "",
+ .input = "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\xaa\x42\xfe\x43\x8d\xea\xa3\x5a"
+ "\xb9\x3d\x9f\xb1\xa3\x8e\x9b\xae",
+ .ilen = 8 + 16,
+ .result = "\x01\x23\x45\x67\x89\xab\xcd\xef",
+ .rlen = 8,
+ }, {
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x00" /* enc key length */
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .klen = 8 + 16 + 0,
+ .iv = "",
+ .input = "Network Security People Have A Strange Sense Of Humor"
+ "\x73\xa5\x3e\x1c\x08\x0e\x8a\x8a"
+ "\x8e\xb5\x5f\x90\x8e\xfe\x13\x23",
+ .ilen = 53 + 16,
+ .result = "Network Security People Have A Strange Sense Of Humor",
+ .rlen = 53,
+ },
+};
+
+static struct aead_testvec hmac_sha1_aes_cbc_enc_tv_temp[] = {
+ { /* RFC 3602 Case 1 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00"
+ "\x06\xa9\x21\x40\x36\xb8\xa1\x5b"
+ "\x51\x2e\x03\xd5\x34\x12\x00\x06",
+ .klen = 8 + 20 + 16,
+ .iv = "\x3d\xaf\xba\x42\x9d\x9e\xb4\x30"
+ "\xb4\x22\xda\x80\x2c\x9f\xac\x41",
+ .input = "Single block msg",
+ .ilen = 16,
+ .result = "\xe3\x53\x77\x9c\x10\x79\xae\xb8"
+ "\x27\x08\x94\x2d\xbe\x77\x18\x1a"
+ "\x1b\x13\xcb\xaf\x89\x5e\xe1\x2c"
+ "\x13\xc5\x2e\xa3\xcc\xed\xdc\xb5"
+ "\x03\x71\xa2\x06",
+ .rlen = 16 + 20,
+ }, { /* RFC 3602 Case 2 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33"
+ "\xc2\x86\x69\x6d\x88\x7c\x9a\xa0"
+ "\x61\x1b\xbb\x3e\x20\x25\xa4\x5a",
+ .klen = 8 + 20 + 16,
+ .iv = "\x56\x2e\x17\x99\x6d\x09\x3d\x28"
+ "\xdd\xb3\xba\x69\x5a\x2e\x6f\x58",
+ .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f",
+ .ilen = 32,
+ .result = "\xd2\x96\xcd\x94\xc2\xcc\xcf\x8a"
+ "\x3a\x86\x30\x28\xb5\xe1\xdc\x0a"
+ "\x75\x86\x60\x2d\x25\x3c\xff\xf9"
+ "\x1b\x82\x66\xbe\xa6\xd6\x1a\xb1"
+ "\xad\x9b\x4c\x5c\x85\xe1\xda\xae"
+ "\xee\x81\x4e\xd7\xdb\x74\xcf\x58"
+ "\x65\x39\xf8\xde",
+ .rlen = 32 + 20,
+ }, { /* RFC 3602 Case 3 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55"
+ "\x6c\x3e\xa0\x47\x76\x30\xce\x21"
+ "\xa2\xce\x33\x4a\xa7\x46\xc2\xcd",
+ .klen = 8 + 20 + 16,
+ .iv = "\xc7\x82\xdc\x4c\x09\x8c\x66\xcb"
+ "\xd9\xcd\x27\xd8\x25\x68\x2c\x81",
+ .input = "This is a 48-byte message (exactly 3 AES blocks)",
+ .ilen = 48,
+ .result = "\xd0\xa0\x2b\x38\x36\x45\x17\x53"
+ "\xd4\x93\x66\x5d\x33\xf0\xe8\x86"
+ "\x2d\xea\x54\xcd\xb2\x93\xab\xc7"
+ "\x50\x69\x39\x27\x67\x72\xf8\xd5"
+ "\x02\x1c\x19\x21\x6b\xad\x52\x5c"
+ "\x85\x79\x69\x5d\x83\xba\x26\x84"
+ "\xc2\xec\x0c\xf8\x7f\x05\xba\xca"
+ "\xff\xee\x4c\xd0\x93\xe6\x36\x7f"
+ "\x8d\x62\xf2\x1e",
+ .rlen = 48 + 20,
+ }, { /* RFC 3602 Case 4 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55"
+ "\x56\xe4\x7a\x38\xc5\x59\x89\x74"
+ "\xbc\x46\x90\x3d\xba\x29\x03\x49",
+ .klen = 8 + 20 + 16,
+ .iv = "\x8c\xe8\x2e\xef\xbe\xa0\xda\x3c"
+ "\x44\x69\x9e\xd7\xdb\x51\xb7\xd9",
+ .input = "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf",
+ .ilen = 64,
+ .result = "\xc3\x0e\x32\xff\xed\xc0\x77\x4e"
+ "\x6a\xff\x6a\xf0\x86\x9f\x71\xaa"
+ "\x0f\x3a\xf0\x7a\x9a\x31\xa9\xc6"
+ "\x84\xdb\x20\x7e\xb0\xef\x8e\x4e"
+ "\x35\x90\x7a\xa6\x32\xc3\xff\xdf"
+ "\x86\x8b\xb7\xb2\x9d\x3d\x46\xad"
+ "\x83\xce\x9f\x9a\x10\x2e\xe9\x9d"
+ "\x49\xa5\x3e\x87\xf4\xc3\xda\x55"
+ "\x1c\x45\x57\xa9\x56\xcb\xa9\x2d"
+ "\x18\xac\xf1\xc7\x5d\xd1\xcd\x0d"
+ "\x1d\xbe\xc6\xe9",
+ .rlen = 64 + 20,
+ }, { /* RFC 3602 Case 5 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55"
+ "\x90\xd3\x82\xb4\x10\xee\xba\x7a"
+ "\xd9\x38\xc4\x6c\xec\x1a\x82\xbf",
+ .klen = 8 + 20 + 16,
+ .iv = "\xe9\x6e\x8c\x08\xab\x46\x57\x63"
+ "\xfd\x09\x8d\x45\xdd\x3f\xf8\x93",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x08\x00\x0e\xbd\xa7\x0a\x00\x00"
+ "\x8e\x9c\x08\x3d\xb9\x5b\x07\x00"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x01\x02\x03\x04\x05\x06\x07\x08"
+ "\x09\x0a\x0b\x0c\x0d\x0e\x0e\x01",
+ .ilen = 80,
+ .result = "\xf6\x63\xc2\x5d\x32\x5c\x18\xc6"
+ "\xa9\x45\x3e\x19\x4e\x12\x08\x49"
+ "\xa4\x87\x0b\x66\xcc\x6b\x99\x65"
+ "\x33\x00\x13\xb4\x89\x8d\xc8\x56"
+ "\xa4\x69\x9e\x52\x3a\x55\xdb\x08"
+ "\x0b\x59\xec\x3a\x8e\x4b\x7e\x52"
+ "\x77\x5b\x07\xd1\xdb\x34\xed\x9c"
+ "\x53\x8a\xb5\x0c\x55\x1b\x87\x4a"
+ "\xa2\x69\xad\xd0\x47\xad\x2d\x59"
+ "\x13\xac\x19\xb7\xcf\xba\xd4\xa6"
+ "\x58\xc6\x84\x75\xe4\xe9\x6b\x0c"
+ "\xe1\xc5\x0b\x73\x4d\x82\x55\xa8"
+ "\x85\xe1\x59\xf7",
+ .rlen = 80 + 20,
+ }, { /* NIST SP800-38A F.2.3 CBC-AES192.Encrypt */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x18" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55"
+ "\x8e\x73\xb0\xf7\xda\x0e\x64\x52"
+ "\xc8\x10\xf3\x2b\x80\x90\x79\xe5"
+ "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b",
+ .klen = 8 + 20 + 24,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .ilen = 64,
+ .result = "\x4f\x02\x1d\xb2\x43\xbc\x63\x3d"
+ "\x71\x78\x18\x3a\x9f\xa0\x71\xe8"
+ "\xb4\xd9\xad\xa9\xad\x7d\xed\xf4"
+ "\xe5\xe7\x38\x76\x3f\x69\x14\x5a"
+ "\x57\x1b\x24\x20\x12\xfb\x7a\xe0"
+ "\x7f\xa9\xba\xac\x3d\xf1\x02\xe0"
+ "\x08\xb0\xe2\x79\x88\x59\x88\x81"
+ "\xd9\x20\xa9\xe6\x4f\x56\x15\xcd"
+ "\x73\xe3\x19\x3f\x8b\xc9\xc6\xf4"
+ "\x5a\xf1\x5b\xa8\x98\x07\xc5\x36"
+ "\x47\x4c\xfc\x36",
+ .rlen = 64 + 20,
+ }, { /* NIST SP800-38A F.2.5 CBC-AES256.Encrypt */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x20" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55"
+ "\x60\x3d\xeb\x10\x15\xca\x71\xbe"
+ "\x2b\x73\xae\xf0\x85\x7d\x77\x81"
+ "\x1f\x35\x2c\x07\x3b\x61\x08\xd7"
+ "\x2d\x98\x10\xa3\x09\x14\xdf\xf4",
+ .klen = 8 + 20 + 32,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .ilen = 64,
+ .result = "\xf5\x8c\x4c\x04\xd6\xe5\xf1\xba"
+ "\x77\x9e\xab\xfb\x5f\x7b\xfb\xd6"
+ "\x9c\xfc\x4e\x96\x7e\xdb\x80\x8d"
+ "\x67\x9f\x77\x7b\xc6\x70\x2c\x7d"
+ "\x39\xf2\x33\x69\xa9\xd9\xba\xcf"
+ "\xa5\x30\xe2\x63\x04\x23\x14\x61"
+ "\xb2\xeb\x05\xe2\xc3\x9b\xe9\xfc"
+ "\xda\x6c\x19\x07\x8c\x6a\x9d\x1b"
+ "\xa3\xe8\x9b\x17\xe3\xf4\x7f\xde"
+ "\x1b\x9f\xc6\x81\x26\x43\x4a\x87"
+ "\x51\xee\xd6\x4e",
+ .rlen = 64 + 20,
+ },
+};
+
+static struct aead_testvec hmac_sha1_ecb_cipher_null_enc_tv_temp[] = {
+ { /* Input data from RFC 2410 Case 1 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x00" /* enc key length */
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00",
+ .klen = 8 + 20 + 0,
+ .iv = "",
+ .input = "\x01\x23\x45\x67\x89\xab\xcd\xef",
+ .ilen = 8,
+ .result = "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\x40\xc3\x0a\xa1\xc9\xa0\x28\xab"
+ "\x99\x5e\x19\x04\xd1\x72\xef\xb8"
+ "\x8c\x5e\xe4\x08",
+ .rlen = 8 + 20,
+ }, { /* Input data from RFC 2410 Case 2 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x00" /* enc key length */
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00",
+ .klen = 8 + 20 + 0,
+ .iv = "",
+ .input = "Network Security People Have A Strange Sense Of Humor",
+ .ilen = 53,
+ .result = "Network Security People Have A Strange Sense Of Humor"
+ "\x75\x6f\x42\x1e\xf8\x50\x21\xd2"
+ "\x65\x47\xee\x8e\x1a\xef\x16\xf6"
+ "\x91\x56\xe4\xd6",
+ .rlen = 53 + 20,
+ },
+};
+
+static struct aead_testvec hmac_sha1_ecb_cipher_null_dec_tv_temp[] = {
+ {
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x00" /* enc key length */
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00",
+ .klen = 8 + 20 + 0,
+ .iv = "",
+ .input = "\x01\x23\x45\x67\x89\xab\xcd\xef"
+ "\x40\xc3\x0a\xa1\xc9\xa0\x28\xab"
+ "\x99\x5e\x19\x04\xd1\x72\xef\xb8"
+ "\x8c\x5e\xe4\x08",
+ .ilen = 8 + 20,
+ .result = "\x01\x23\x45\x67\x89\xab\xcd\xef",
+ .rlen = 8,
+ }, {
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x00" /* enc key length */
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00",
+ .klen = 8 + 20 + 0,
+ .iv = "",
+ .input = "Network Security People Have A Strange Sense Of Humor"
+ "\x75\x6f\x42\x1e\xf8\x50\x21\xd2"
+ "\x65\x47\xee\x8e\x1a\xef\x16\xf6"
+ "\x91\x56\xe4\xd6",
+ .ilen = 53 + 20,
+ .result = "Network Security People Have A Strange Sense Of Humor",
+ .rlen = 53,
+ },
+};
+
+static struct aead_testvec hmac_sha256_aes_cbc_enc_tv_temp[] = {
+ { /* RFC 3602 Case 1 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x06\xa9\x21\x40\x36\xb8\xa1\x5b"
+ "\x51\x2e\x03\xd5\x34\x12\x00\x06",
+ .klen = 8 + 32 + 16,
+ .iv = "\x3d\xaf\xba\x42\x9d\x9e\xb4\x30"
+ "\xb4\x22\xda\x80\x2c\x9f\xac\x41",
+ .input = "Single block msg",
+ .ilen = 16,
+ .result = "\xe3\x53\x77\x9c\x10\x79\xae\xb8"
+ "\x27\x08\x94\x2d\xbe\x77\x18\x1a"
+ "\xcc\xde\x2d\x6a\xae\xf1\x0b\xcc"
+ "\x38\x06\x38\x51\xb4\xb8\xf3\x5b"
+ "\x5c\x34\xa6\xa3\x6e\x0b\x05\xe5"
+ "\x6a\x6d\x44\xaa\x26\xa8\x44\xa5",
+ .rlen = 16 + 32,
+ }, { /* RFC 3602 Case 2 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\xc2\x86\x69\x6d\x88\x7c\x9a\xa0"
+ "\x61\x1b\xbb\x3e\x20\x25\xa4\x5a",
+ .klen = 8 + 32 + 16,
+ .iv = "\x56\x2e\x17\x99\x6d\x09\x3d\x28"
+ "\xdd\xb3\xba\x69\x5a\x2e\x6f\x58",
+ .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f",
+ .ilen = 32,
+ .result = "\xd2\x96\xcd\x94\xc2\xcc\xcf\x8a"
+ "\x3a\x86\x30\x28\xb5\xe1\xdc\x0a"
+ "\x75\x86\x60\x2d\x25\x3c\xff\xf9"
+ "\x1b\x82\x66\xbe\xa6\xd6\x1a\xb1"
+ "\xf5\x33\x53\xf3\x68\x85\x2a\x99"
+ "\x0e\x06\x58\x8f\xba\xf6\x06\xda"
+ "\x49\x69\x0d\x5b\xd4\x36\x06\x62"
+ "\x35\x5e\x54\x58\x53\x4d\xdf\xbf",
+ .rlen = 32 + 32,
+ }, { /* RFC 3602 Case 3 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x6c\x3e\xa0\x47\x76\x30\xce\x21"
+ "\xa2\xce\x33\x4a\xa7\x46\xc2\xcd",
+ .klen = 8 + 32 + 16,
+ .iv = "\xc7\x82\xdc\x4c\x09\x8c\x66\xcb"
+ "\xd9\xcd\x27\xd8\x25\x68\x2c\x81",
+ .input = "This is a 48-byte message (exactly 3 AES blocks)",
+ .ilen = 48,
+ .result = "\xd0\xa0\x2b\x38\x36\x45\x17\x53"
+ "\xd4\x93\x66\x5d\x33\xf0\xe8\x86"
+ "\x2d\xea\x54\xcd\xb2\x93\xab\xc7"
+ "\x50\x69\x39\x27\x67\x72\xf8\xd5"
+ "\x02\x1c\x19\x21\x6b\xad\x52\x5c"
+ "\x85\x79\x69\x5d\x83\xba\x26\x84"
+ "\x68\xb9\x3e\x90\x38\xa0\x88\x01"
+ "\xe7\xc6\xce\x10\x31\x2f\x9b\x1d"
+ "\x24\x78\xfb\xbe\x02\xe0\x4f\x40"
+ "\x10\xbd\xaa\xc6\xa7\x79\xe0\x1a",
+ .rlen = 48 + 32,
+ }, { /* RFC 3602 Case 4 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x56\xe4\x7a\x38\xc5\x59\x89\x74"
+ "\xbc\x46\x90\x3d\xba\x29\x03\x49",
+ .klen = 8 + 32 + 16,
+ .iv = "\x8c\xe8\x2e\xef\xbe\xa0\xda\x3c"
+ "\x44\x69\x9e\xd7\xdb\x51\xb7\xd9",
+ .input = "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf",
+ .ilen = 64,
+ .result = "\xc3\x0e\x32\xff\xed\xc0\x77\x4e"
+ "\x6a\xff\x6a\xf0\x86\x9f\x71\xaa"
+ "\x0f\x3a\xf0\x7a\x9a\x31\xa9\xc6"
+ "\x84\xdb\x20\x7e\xb0\xef\x8e\x4e"
+ "\x35\x90\x7a\xa6\x32\xc3\xff\xdf"
+ "\x86\x8b\xb7\xb2\x9d\x3d\x46\xad"
+ "\x83\xce\x9f\x9a\x10\x2e\xe9\x9d"
+ "\x49\xa5\x3e\x87\xf4\xc3\xda\x55"
+ "\x7a\x1b\xd4\x3c\xdb\x17\x95\xe2"
+ "\xe0\x93\xec\xc9\x9f\xf7\xce\xd8"
+ "\x3f\x54\xe2\x49\x39\xe3\x71\x25"
+ "\x2b\x6c\xe9\x5d\xec\xec\x2b\x64",
+ .rlen = 64 + 32,
+ }, { /* RFC 3602 Case 5 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x90\xd3\x82\xb4\x10\xee\xba\x7a"
+ "\xd9\x38\xc4\x6c\xec\x1a\x82\xbf",
+ .klen = 8 + 32 + 16,
+ .iv = "\xe9\x6e\x8c\x08\xab\x46\x57\x63"
+ "\xfd\x09\x8d\x45\xdd\x3f\xf8\x93",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x08\x00\x0e\xbd\xa7\x0a\x00\x00"
+ "\x8e\x9c\x08\x3d\xb9\x5b\x07\x00"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x01\x02\x03\x04\x05\x06\x07\x08"
+ "\x09\x0a\x0b\x0c\x0d\x0e\x0e\x01",
+ .ilen = 80,
+ .result = "\xf6\x63\xc2\x5d\x32\x5c\x18\xc6"
+ "\xa9\x45\x3e\x19\x4e\x12\x08\x49"
+ "\xa4\x87\x0b\x66\xcc\x6b\x99\x65"
+ "\x33\x00\x13\xb4\x89\x8d\xc8\x56"
+ "\xa4\x69\x9e\x52\x3a\x55\xdb\x08"
+ "\x0b\x59\xec\x3a\x8e\x4b\x7e\x52"
+ "\x77\x5b\x07\xd1\xdb\x34\xed\x9c"
+ "\x53\x8a\xb5\x0c\x55\x1b\x87\x4a"
+ "\xa2\x69\xad\xd0\x47\xad\x2d\x59"
+ "\x13\xac\x19\xb7\xcf\xba\xd4\xa6"
+ "\xbb\xd4\x0f\xbe\xa3\x3b\x4c\xb8"
+ "\x3a\xd2\xe1\x03\x86\xa5\x59\xb7"
+ "\x73\xc3\x46\x20\x2c\xb1\xef\x68"
+ "\xbb\x8a\x32\x7e\x12\x8c\x69\xcf",
+ .rlen = 80 + 32,
+ }, { /* NIST SP800-38A F.2.3 CBC-AES192.Encrypt */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x18" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x8e\x73\xb0\xf7\xda\x0e\x64\x52"
+ "\xc8\x10\xf3\x2b\x80\x90\x79\xe5"
+ "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b",
+ .klen = 8 + 32 + 24,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .ilen = 64,
+ .result = "\x4f\x02\x1d\xb2\x43\xbc\x63\x3d"
+ "\x71\x78\x18\x3a\x9f\xa0\x71\xe8"
+ "\xb4\xd9\xad\xa9\xad\x7d\xed\xf4"
+ "\xe5\xe7\x38\x76\x3f\x69\x14\x5a"
+ "\x57\x1b\x24\x20\x12\xfb\x7a\xe0"
+ "\x7f\xa9\xba\xac\x3d\xf1\x02\xe0"
+ "\x08\xb0\xe2\x79\x88\x59\x88\x81"
+ "\xd9\x20\xa9\xe6\x4f\x56\x15\xcd"
+ "\x2f\xee\x5f\xdb\x66\xfe\x79\x09"
+ "\x61\x81\x31\xea\x5b\x3d\x8e\xfb"
+ "\xca\x71\x85\x93\xf7\x85\x55\x8b"
+ "\x7a\xe4\x94\xca\x8b\xba\x19\x33",
+ .rlen = 64 + 32,
+ }, { /* NIST SP800-38A F.2.5 CBC-AES256.Encrypt */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x20" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x60\x3d\xeb\x10\x15\xca\x71\xbe"
+ "\x2b\x73\xae\xf0\x85\x7d\x77\x81"
+ "\x1f\x35\x2c\x07\x3b\x61\x08\xd7"
+ "\x2d\x98\x10\xa3\x09\x14\xdf\xf4",
+ .klen = 8 + 32 + 32,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .ilen = 64,
+ .result = "\xf5\x8c\x4c\x04\xd6\xe5\xf1\xba"
+ "\x77\x9e\xab\xfb\x5f\x7b\xfb\xd6"
+ "\x9c\xfc\x4e\x96\x7e\xdb\x80\x8d"
+ "\x67\x9f\x77\x7b\xc6\x70\x2c\x7d"
+ "\x39\xf2\x33\x69\xa9\xd9\xba\xcf"
+ "\xa5\x30\xe2\x63\x04\x23\x14\x61"
+ "\xb2\xeb\x05\xe2\xc3\x9b\xe9\xfc"
+ "\xda\x6c\x19\x07\x8c\x6a\x9d\x1b"
+ "\x24\x29\xed\xc2\x31\x49\xdb\xb1"
+ "\x8f\x74\xbd\x17\x92\x03\xbe\x8f"
+ "\xf3\x61\xde\x1c\xe9\xdb\xcd\xd0"
+ "\xcc\xce\xe9\x85\x57\xcf\x6f\x5f",
+ .rlen = 64 + 32,
+ },
+};
+
+static struct aead_testvec hmac_sha512_aes_cbc_enc_tv_temp[] = {
+ { /* RFC 3602 Case 1 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x06\xa9\x21\x40\x36\xb8\xa1\x5b"
+ "\x51\x2e\x03\xd5\x34\x12\x00\x06",
+ .klen = 8 + 64 + 16,
+ .iv = "\x3d\xaf\xba\x42\x9d\x9e\xb4\x30"
+ "\xb4\x22\xda\x80\x2c\x9f\xac\x41",
+ .input = "Single block msg",
+ .ilen = 16,
+ .result = "\xe3\x53\x77\x9c\x10\x79\xae\xb8"
+ "\x27\x08\x94\x2d\xbe\x77\x18\x1a"
+ "\x3f\xdc\xad\x90\x03\x63\x5e\x68"
+ "\xc3\x13\xdd\xa4\x5c\x4d\x54\xa7"
+ "\x19\x6e\x03\x75\x2b\xa1\x62\xce"
+ "\xe0\xc6\x96\x75\xb2\x14\xca\x96"
+ "\xec\xbd\x50\x08\x07\x64\x1a\x49"
+ "\xe8\x9a\x7c\x06\x3d\xcb\xff\xb2"
+ "\xfa\x20\x89\xdd\x9c\xac\x9e\x16"
+ "\x18\x8a\xa0\x6d\x01\x6c\xa3\x3a",
+ .rlen = 16 + 64,
+ }, { /* RFC 3602 Case 2 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\xc2\x86\x69\x6d\x88\x7c\x9a\xa0"
+ "\x61\x1b\xbb\x3e\x20\x25\xa4\x5a",
+ .klen = 8 + 64 + 16,
+ .iv = "\x56\x2e\x17\x99\x6d\x09\x3d\x28"
+ "\xdd\xb3\xba\x69\x5a\x2e\x6f\x58",
+ .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f",
+ .ilen = 32,
+ .result = "\xd2\x96\xcd\x94\xc2\xcc\xcf\x8a"
+ "\x3a\x86\x30\x28\xb5\xe1\xdc\x0a"
+ "\x75\x86\x60\x2d\x25\x3c\xff\xf9"
+ "\x1b\x82\x66\xbe\xa6\xd6\x1a\xb1"
+ "\xda\xb2\x0c\xb2\x26\xc4\xd5\xef"
+ "\x60\x38\xa4\x5e\x9a\x8c\x1b\x41"
+ "\x03\x9f\xc4\x64\x7f\x01\x42\x9b"
+ "\x0e\x1b\xea\xef\xbc\x88\x19\x5e"
+ "\x31\x7e\xc2\x95\xfc\x09\x32\x0a"
+ "\x46\x32\x7c\x41\x9c\x59\x3e\xe9"
+ "\x8f\x9f\xd4\x31\xd6\x22\xbd\xf8"
+ "\xf7\x0a\x94\xe5\xa9\xc3\xf6\x9d",
+ .rlen = 32 + 64,
+ }, { /* RFC 3602 Case 3 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x33\x44\x55\x66\x77\x88\x99\xaa"
+ "\xbb\xcc\xdd\xee\xff\x11\x22\x33"
+ "\x44\x55\x66\x77\x88\x99\xaa\xbb"
+ "\xcc\xdd\xee\xff\x11\x22\x33\x44"
+ "\x6c\x3e\xa0\x47\x76\x30\xce\x21"
+ "\xa2\xce\x33\x4a\xa7\x46\xc2\xcd",
+ .klen = 8 + 64 + 16,
+ .iv = "\xc7\x82\xdc\x4c\x09\x8c\x66\xcb"
+ "\xd9\xcd\x27\xd8\x25\x68\x2c\x81",
+ .input = "This is a 48-byte message (exactly 3 AES blocks)",
+ .ilen = 48,
+ .result = "\xd0\xa0\x2b\x38\x36\x45\x17\x53"
+ "\xd4\x93\x66\x5d\x33\xf0\xe8\x86"
+ "\x2d\xea\x54\xcd\xb2\x93\xab\xc7"
+ "\x50\x69\x39\x27\x67\x72\xf8\xd5"
+ "\x02\x1c\x19\x21\x6b\xad\x52\x5c"
+ "\x85\x79\x69\x5d\x83\xba\x26\x84"
+ "\x64\x19\x17\x5b\x57\xe0\x21\x0f"
+ "\xca\xdb\xa1\x26\x38\x14\xa2\x69"
+ "\xdb\x54\x67\x80\xc0\x54\xe0\xfd"
+ "\x3e\x91\xe7\x91\x7f\x13\x38\x44"
+ "\xb7\xb1\xd6\xc8\x7d\x48\x8d\x41"
+ "\x08\xea\x29\x6c\x74\x67\x3f\xb0"
+ "\xac\x7f\x5c\x1d\xf5\xee\x22\x66"
+ "\x27\xa6\xb6\x13\xba\xba\xf0\xc2",
+ .rlen = 48 + 64,
+ }, { /* RFC 3602 Case 4 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x33\x44\x55\x66\x77\x88\x99\xaa"
+ "\xbb\xcc\xdd\xee\xff\x11\x22\x33"
+ "\x44\x55\x66\x77\x88\x99\xaa\xbb"
+ "\xcc\xdd\xee\xff\x11\x22\x33\x44"
+ "\x56\xe4\x7a\x38\xc5\x59\x89\x74"
+ "\xbc\x46\x90\x3d\xba\x29\x03\x49",
+ .klen = 8 + 64 + 16,
+ .iv = "\x8c\xe8\x2e\xef\xbe\xa0\xda\x3c"
+ "\x44\x69\x9e\xd7\xdb\x51\xb7\xd9",
+ .input = "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf",
+ .ilen = 64,
+ .result = "\xc3\x0e\x32\xff\xed\xc0\x77\x4e"
+ "\x6a\xff\x6a\xf0\x86\x9f\x71\xaa"
+ "\x0f\x3a\xf0\x7a\x9a\x31\xa9\xc6"
+ "\x84\xdb\x20\x7e\xb0\xef\x8e\x4e"
+ "\x35\x90\x7a\xa6\x32\xc3\xff\xdf"
+ "\x86\x8b\xb7\xb2\x9d\x3d\x46\xad"
+ "\x83\xce\x9f\x9a\x10\x2e\xe9\x9d"
+ "\x49\xa5\x3e\x87\xf4\xc3\xda\x55"
+ "\x82\xcd\x42\x28\x21\x20\x15\xcc"
+ "\xb7\xb2\x48\x40\xc7\x64\x41\x3a"
+ "\x61\x32\x82\x85\xcf\x27\xed\xb4"
+ "\xe4\x68\xa2\xf5\x79\x26\x27\xb2"
+ "\x51\x67\x6a\xc4\xf0\x66\x55\x50"
+ "\xbc\x6f\xed\xd5\x8d\xde\x23\x7c"
+ "\x62\x98\x14\xd7\x2f\x37\x8d\xdf"
+ "\xf4\x33\x80\xeb\x8e\xb4\xa4\xda",
+ .rlen = 64 + 64,
+ }, { /* RFC 3602 Case 5 */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x10" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x33\x44\x55\x66\x77\x88\x99\xaa"
+ "\xbb\xcc\xdd\xee\xff\x11\x22\x33"
+ "\x44\x55\x66\x77\x88\x99\xaa\xbb"
+ "\xcc\xdd\xee\xff\x11\x22\x33\x44"
+ "\x90\xd3\x82\xb4\x10\xee\xba\x7a"
+ "\xd9\x38\xc4\x6c\xec\x1a\x82\xbf",
+ .klen = 8 + 64 + 16,
+ .iv = "\xe9\x6e\x8c\x08\xab\x46\x57\x63"
+ "\xfd\x09\x8d\x45\xdd\x3f\xf8\x93",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x08\x00\x0e\xbd\xa7\x0a\x00\x00"
+ "\x8e\x9c\x08\x3d\xb9\x5b\x07\x00"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x01\x02\x03\x04\x05\x06\x07\x08"
+ "\x09\x0a\x0b\x0c\x0d\x0e\x0e\x01",
+ .ilen = 80,
+ .result = "\xf6\x63\xc2\x5d\x32\x5c\x18\xc6"
+ "\xa9\x45\x3e\x19\x4e\x12\x08\x49"
+ "\xa4\x87\x0b\x66\xcc\x6b\x99\x65"
+ "\x33\x00\x13\xb4\x89\x8d\xc8\x56"
+ "\xa4\x69\x9e\x52\x3a\x55\xdb\x08"
+ "\x0b\x59\xec\x3a\x8e\x4b\x7e\x52"
+ "\x77\x5b\x07\xd1\xdb\x34\xed\x9c"
+ "\x53\x8a\xb5\x0c\x55\x1b\x87\x4a"
+ "\xa2\x69\xad\xd0\x47\xad\x2d\x59"
+ "\x13\xac\x19\xb7\xcf\xba\xd4\xa6"
+ "\x74\x84\x94\xe2\xd7\x7a\xf9\xbf"
+ "\x00\x8a\xa2\xd5\xb7\xf3\x60\xcf"
+ "\xa0\x47\xdf\x4e\x09\xf4\xb1\x7f"
+ "\x14\xd9\x3d\x53\x8e\x12\xb3\x00"
+ "\x4c\x0a\x4e\x32\x40\x43\x88\xce"
+ "\x92\x26\xc1\x76\x20\x11\xeb\xba"
+ "\x62\x4f\x9a\x62\x25\xc3\x75\x80"
+ "\xb7\x0a\x17\xf5\xd7\x94\xb4\x14",
+ .rlen = 80 + 64,
+ }, { /* NIST SP800-38A F.2.3 CBC-AES192.Encrypt */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x18" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x33\x44\x55\x66\x77\x88\x99\xaa"
+ "\xbb\xcc\xdd\xee\xff\x11\x22\x33"
+ "\x44\x55\x66\x77\x88\x99\xaa\xbb"
+ "\xcc\xdd\xee\xff\x11\x22\x33\x44"
+ "\x8e\x73\xb0\xf7\xda\x0e\x64\x52"
+ "\xc8\x10\xf3\x2b\x80\x90\x79\xe5"
+ "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b",
+ .klen = 8 + 64 + 24,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .ilen = 64,
+ .result = "\x4f\x02\x1d\xb2\x43\xbc\x63\x3d"
+ "\x71\x78\x18\x3a\x9f\xa0\x71\xe8"
+ "\xb4\xd9\xad\xa9\xad\x7d\xed\xf4"
+ "\xe5\xe7\x38\x76\x3f\x69\x14\x5a"
+ "\x57\x1b\x24\x20\x12\xfb\x7a\xe0"
+ "\x7f\xa9\xba\xac\x3d\xf1\x02\xe0"
+ "\x08\xb0\xe2\x79\x88\x59\x88\x81"
+ "\xd9\x20\xa9\xe6\x4f\x56\x15\xcd"
+ "\x77\x4b\x69\x9d\x3a\x0d\xb4\x99"
+ "\x8f\xc6\x8e\x0e\x72\x58\xe3\x56"
+ "\xbb\x21\xd2\x7d\x93\x11\x17\x91"
+ "\xc4\x83\xfd\x0a\xea\x71\xfe\x77"
+ "\xae\x6f\x0a\xa5\xf0\xcf\xe1\x35"
+ "\xba\x03\xd5\x32\xfa\x5f\x41\x58"
+ "\x8d\x43\x98\xa7\x94\x16\x07\x02"
+ "\x0f\xb6\x81\x50\x28\x95\x2e\x75",
+ .rlen = 64 + 64,
+ }, { /* NIST SP800-38A F.2.5 CBC-AES256.Encrypt */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x20" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x33\x44\x55\x66\x77\x88\x99\xaa"
+ "\xbb\xcc\xdd\xee\xff\x11\x22\x33"
+ "\x44\x55\x66\x77\x88\x99\xaa\xbb"
+ "\xcc\xdd\xee\xff\x11\x22\x33\x44"
+ "\x60\x3d\xeb\x10\x15\xca\x71\xbe"
+ "\x2b\x73\xae\xf0\x85\x7d\x77\x81"
+ "\x1f\x35\x2c\x07\x3b\x61\x08\xd7"
+ "\x2d\x98\x10\xa3\x09\x14\xdf\xf4",
+ .klen = 8 + 64 + 32,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .ilen = 64,
+ .result = "\xf5\x8c\x4c\x04\xd6\xe5\xf1\xba"
+ "\x77\x9e\xab\xfb\x5f\x7b\xfb\xd6"
+ "\x9c\xfc\x4e\x96\x7e\xdb\x80\x8d"
+ "\x67\x9f\x77\x7b\xc6\x70\x2c\x7d"
+ "\x39\xf2\x33\x69\xa9\xd9\xba\xcf"
+ "\xa5\x30\xe2\x63\x04\x23\x14\x61"
+ "\xb2\xeb\x05\xe2\xc3\x9b\xe9\xfc"
+ "\xda\x6c\x19\x07\x8c\x6a\x9d\x1b"
+ "\xb2\x27\x69\x7f\x45\x64\x79\x2b"
+ "\xb7\xb8\x4c\xd4\x75\x94\x68\x40"
+ "\x2a\xea\x91\xc7\x3f\x7c\xed\x7b"
+ "\x95\x2c\x9b\xa8\xf5\xe5\x52\x8d"
+ "\x6b\xe1\xae\xf1\x74\xfa\x0d\x0c"
+ "\xe3\x8d\x64\xc3\x8d\xff\x7c\x8c"
+ "\xdb\xbf\xa0\xb4\x01\xa2\xa8\xa2"
+ "\x2c\xb1\x62\x2c\x10\xca\xf1\x21",
+ .rlen = 64 + 64,
+ },
+};
+
+#define HMAC_SHA1_DES_CBC_ENC_TEST_VEC 1
+
+static struct aead_testvec hmac_sha1_des_cbc_enc_tv_temp[] = {
+ { /* Generated with Crypto++ */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x08" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55"
+ "\xE9\xC0\xFF\x2E\x76\x0B\x64\x24",
+ .klen = 8 + 20 + 8,
+ .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x6f\x54\x20\x6f\x61\x4d\x79\x6e"
+ "\x53\x20\x63\x65\x65\x72\x73\x74"
+ "\x54\x20\x6f\x6f\x4d\x20\x6e\x61"
+ "\x20\x79\x65\x53\x72\x63\x74\x65"
+ "\x20\x73\x6f\x54\x20\x6f\x61\x4d"
+ "\x79\x6e\x53\x20\x63\x65\x65\x72"
+ "\x73\x74\x54\x20\x6f\x6f\x4d\x20"
+ "\x6e\x61\x20\x79\x65\x53\x72\x63"
+ "\x74\x65\x20\x73\x6f\x54\x20\x6f"
+ "\x61\x4d\x79\x6e\x53\x20\x63\x65"
+ "\x65\x72\x73\x74\x54\x20\x6f\x6f"
+ "\x4d\x20\x6e\x61\x20\x79\x65\x53"
+ "\x72\x63\x74\x65\x20\x73\x6f\x54"
+ "\x20\x6f\x61\x4d\x79\x6e\x53\x20"
+ "\x63\x65\x65\x72\x73\x74\x54\x20"
+ "\x6f\x6f\x4d\x20\x6e\x61\x0a\x79",
+ .ilen = 128,
+ .result = "\x70\xd6\xde\x64\x87\x17\xf1\xe8"
+ "\x54\x31\x85\x37\xed\x6b\x01\x8d"
+ "\xe3\xcc\xe0\x1d\x5e\xf3\xfe\xf1"
+ "\x41\xaa\x33\x91\xa7\x7d\x99\x88"
+ "\x4d\x85\x6e\x2f\xa3\x69\xf5\x82"
+ "\x3a\x6f\x25\xcb\x7d\x58\x1f\x9b"
+ "\xaa\x9c\x11\xd5\x76\x67\xce\xde"
+ "\x56\xd7\x5a\x80\x69\xea\x3a\x02"
+ "\xf0\xc7\x7c\xe3\xcb\x40\xe5\x52"
+ "\xd1\x10\x92\x78\x0b\x8e\x5b\xf1"
+ "\xe3\x26\x1f\xe1\x15\x41\xc7\xba"
+ "\x99\xdb\x08\x51\x1c\xd3\x01\xf4"
+ "\x87\x47\x39\xb8\xd2\xdd\xbd\xfb"
+ "\x66\x13\xdf\x1c\x01\x44\xf0\x7a"
+ "\x1a\x6b\x13\xf5\xd5\x0b\xb8\xba"
+ "\x53\xba\xe1\x76\xe3\x82\x07\x86"
+ "\x95\x16\x20\x09\xf5\x95\x19\xfd"
+ "\x3c\xc7\xe0\x42\xc0\x14\x69\xfa"
+ "\x5c\x44\xa9\x37",
+ .rlen = 128 + 20,
+ },
+};
+
+#define HMAC_SHA224_DES_CBC_ENC_TEST_VEC 1
+
+static struct aead_testvec hmac_sha224_des_cbc_enc_tv_temp[] = {
+ { /* Generated with Crypto++ */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x08" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xE9\xC0\xFF\x2E\x76\x0B\x64\x24",
+ .klen = 8 + 24 + 8,
+ .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x6f\x54\x20\x6f\x61\x4d\x79\x6e"
+ "\x53\x20\x63\x65\x65\x72\x73\x74"
+ "\x54\x20\x6f\x6f\x4d\x20\x6e\x61"
+ "\x20\x79\x65\x53\x72\x63\x74\x65"
+ "\x20\x73\x6f\x54\x20\x6f\x61\x4d"
+ "\x79\x6e\x53\x20\x63\x65\x65\x72"
+ "\x73\x74\x54\x20\x6f\x6f\x4d\x20"
+ "\x6e\x61\x20\x79\x65\x53\x72\x63"
+ "\x74\x65\x20\x73\x6f\x54\x20\x6f"
+ "\x61\x4d\x79\x6e\x53\x20\x63\x65"
+ "\x65\x72\x73\x74\x54\x20\x6f\x6f"
+ "\x4d\x20\x6e\x61\x20\x79\x65\x53"
+ "\x72\x63\x74\x65\x20\x73\x6f\x54"
+ "\x20\x6f\x61\x4d\x79\x6e\x53\x20"
+ "\x63\x65\x65\x72\x73\x74\x54\x20"
+ "\x6f\x6f\x4d\x20\x6e\x61\x0a\x79",
+ .ilen = 128,
+ .result = "\x70\xd6\xde\x64\x87\x17\xf1\xe8"
+ "\x54\x31\x85\x37\xed\x6b\x01\x8d"
+ "\xe3\xcc\xe0\x1d\x5e\xf3\xfe\xf1"
+ "\x41\xaa\x33\x91\xa7\x7d\x99\x88"
+ "\x4d\x85\x6e\x2f\xa3\x69\xf5\x82"
+ "\x3a\x6f\x25\xcb\x7d\x58\x1f\x9b"
+ "\xaa\x9c\x11\xd5\x76\x67\xce\xde"
+ "\x56\xd7\x5a\x80\x69\xea\x3a\x02"
+ "\xf0\xc7\x7c\xe3\xcb\x40\xe5\x52"
+ "\xd1\x10\x92\x78\x0b\x8e\x5b\xf1"
+ "\xe3\x26\x1f\xe1\x15\x41\xc7\xba"
+ "\x99\xdb\x08\x51\x1c\xd3\x01\xf4"
+ "\x87\x47\x39\xb8\xd2\xdd\xbd\xfb"
+ "\x66\x13\xdf\x1c\x01\x44\xf0\x7a"
+ "\x1a\x6b\x13\xf5\xd5\x0b\xb8\xba"
+ "\x53\xba\xe1\x76\xe3\x82\x07\x86"
+ "\x9c\x2d\x7e\xee\x20\x34\x55\x0a"
+ "\xce\xb5\x4e\x64\x53\xe7\xbf\x91"
+ "\xab\xd4\xd9\xda\xc9\x12\xae\xf7",
+ .rlen = 128 + 24,
+ },
+};
+
+#define HMAC_SHA256_DES_CBC_ENC_TEST_VEC 1
+
+static struct aead_testvec hmac_sha256_des_cbc_enc_tv_temp[] = {
+ { /* Generated with Crypto++ */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x08" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\xE9\xC0\xFF\x2E\x76\x0B\x64\x24",
+ .klen = 8 + 32 + 8,
+ .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x6f\x54\x20\x6f\x61\x4d\x79\x6e"
+ "\x53\x20\x63\x65\x65\x72\x73\x74"
+ "\x54\x20\x6f\x6f\x4d\x20\x6e\x61"
+ "\x20\x79\x65\x53\x72\x63\x74\x65"
+ "\x20\x73\x6f\x54\x20\x6f\x61\x4d"
+ "\x79\x6e\x53\x20\x63\x65\x65\x72"
+ "\x73\x74\x54\x20\x6f\x6f\x4d\x20"
+ "\x6e\x61\x20\x79\x65\x53\x72\x63"
+ "\x74\x65\x20\x73\x6f\x54\x20\x6f"
+ "\x61\x4d\x79\x6e\x53\x20\x63\x65"
+ "\x65\x72\x73\x74\x54\x20\x6f\x6f"
+ "\x4d\x20\x6e\x61\x20\x79\x65\x53"
+ "\x72\x63\x74\x65\x20\x73\x6f\x54"
+ "\x20\x6f\x61\x4d\x79\x6e\x53\x20"
+ "\x63\x65\x65\x72\x73\x74\x54\x20"
+ "\x6f\x6f\x4d\x20\x6e\x61\x0a\x79",
+ .ilen = 128,
+ .result = "\x70\xd6\xde\x64\x87\x17\xf1\xe8"
+ "\x54\x31\x85\x37\xed\x6b\x01\x8d"
+ "\xe3\xcc\xe0\x1d\x5e\xf3\xfe\xf1"
+ "\x41\xaa\x33\x91\xa7\x7d\x99\x88"
+ "\x4d\x85\x6e\x2f\xa3\x69\xf5\x82"
+ "\x3a\x6f\x25\xcb\x7d\x58\x1f\x9b"
+ "\xaa\x9c\x11\xd5\x76\x67\xce\xde"
+ "\x56\xd7\x5a\x80\x69\xea\x3a\x02"
+ "\xf0\xc7\x7c\xe3\xcb\x40\xe5\x52"
+ "\xd1\x10\x92\x78\x0b\x8e\x5b\xf1"
+ "\xe3\x26\x1f\xe1\x15\x41\xc7\xba"
+ "\x99\xdb\x08\x51\x1c\xd3\x01\xf4"
+ "\x87\x47\x39\xb8\xd2\xdd\xbd\xfb"
+ "\x66\x13\xdf\x1c\x01\x44\xf0\x7a"
+ "\x1a\x6b\x13\xf5\xd5\x0b\xb8\xba"
+ "\x53\xba\xe1\x76\xe3\x82\x07\x86"
+ "\xc6\x58\xa1\x60\x70\x91\x39\x36"
+ "\x50\xf6\x5d\xab\x4b\x51\x4e\x5e"
+ "\xde\x63\xde\x76\x52\xde\x9f\xba"
+ "\x90\xcf\x15\xf2\xbb\x6e\x84\x00",
+ .rlen = 128 + 32,
+ },
+};
+
+#define HMAC_SHA384_DES_CBC_ENC_TEST_VEC 1
+
+static struct aead_testvec hmac_sha384_des_cbc_enc_tv_temp[] = {
+ { /* Generated with Crypto++ */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x08" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x33\x44\x55\x66\x77\x88\x99\xaa"
+ "\xbb\xcc\xdd\xee\xff\x11\x22\x33"
+ "\xE9\xC0\xFF\x2E\x76\x0B\x64\x24",
+ .klen = 8 + 48 + 8,
+ .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x6f\x54\x20\x6f\x61\x4d\x79\x6e"
+ "\x53\x20\x63\x65\x65\x72\x73\x74"
+ "\x54\x20\x6f\x6f\x4d\x20\x6e\x61"
+ "\x20\x79\x65\x53\x72\x63\x74\x65"
+ "\x20\x73\x6f\x54\x20\x6f\x61\x4d"
+ "\x79\x6e\x53\x20\x63\x65\x65\x72"
+ "\x73\x74\x54\x20\x6f\x6f\x4d\x20"
+ "\x6e\x61\x20\x79\x65\x53\x72\x63"
+ "\x74\x65\x20\x73\x6f\x54\x20\x6f"
+ "\x61\x4d\x79\x6e\x53\x20\x63\x65"
+ "\x65\x72\x73\x74\x54\x20\x6f\x6f"
+ "\x4d\x20\x6e\x61\x20\x79\x65\x53"
+ "\x72\x63\x74\x65\x20\x73\x6f\x54"
+ "\x20\x6f\x61\x4d\x79\x6e\x53\x20"
+ "\x63\x65\x65\x72\x73\x74\x54\x20"
+ "\x6f\x6f\x4d\x20\x6e\x61\x0a\x79",
+ .ilen = 128,
+ .result = "\x70\xd6\xde\x64\x87\x17\xf1\xe8"
+ "\x54\x31\x85\x37\xed\x6b\x01\x8d"
+ "\xe3\xcc\xe0\x1d\x5e\xf3\xfe\xf1"
+ "\x41\xaa\x33\x91\xa7\x7d\x99\x88"
+ "\x4d\x85\x6e\x2f\xa3\x69\xf5\x82"
+ "\x3a\x6f\x25\xcb\x7d\x58\x1f\x9b"
+ "\xaa\x9c\x11\xd5\x76\x67\xce\xde"
+ "\x56\xd7\x5a\x80\x69\xea\x3a\x02"
+ "\xf0\xc7\x7c\xe3\xcb\x40\xe5\x52"
+ "\xd1\x10\x92\x78\x0b\x8e\x5b\xf1"
+ "\xe3\x26\x1f\xe1\x15\x41\xc7\xba"
+ "\x99\xdb\x08\x51\x1c\xd3\x01\xf4"
+ "\x87\x47\x39\xb8\xd2\xdd\xbd\xfb"
+ "\x66\x13\xdf\x1c\x01\x44\xf0\x7a"
+ "\x1a\x6b\x13\xf5\xd5\x0b\xb8\xba"
+ "\x53\xba\xe1\x76\xe3\x82\x07\x86"
+ "\xa8\x8e\x9c\x74\x8c\x2b\x99\xa0"
+ "\xc8\x8c\xef\x25\x07\x83\x11\x3a"
+ "\x31\x8d\xbe\x3b\x6a\xd7\x96\xfe"
+ "\x5e\x67\xb5\x74\xe7\xe7\x85\x61"
+ "\x6a\x95\x26\x75\xcc\x53\x89\xf3"
+ "\x74\xc9\x2a\x76\x20\xa2\x64\x62",
+ .rlen = 128 + 48,
+ },
+};
+
+#define HMAC_SHA512_DES_CBC_ENC_TEST_VEC 1
+
+static struct aead_testvec hmac_sha512_des_cbc_enc_tv_temp[] = {
+ { /* Generated with Crypto++ */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x08" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x33\x44\x55\x66\x77\x88\x99\xaa"
+ "\xbb\xcc\xdd\xee\xff\x11\x22\x33"
+ "\x44\x55\x66\x77\x88\x99\xaa\xbb"
+ "\xcc\xdd\xee\xff\x11\x22\x33\x44"
+ "\xE9\xC0\xFF\x2E\x76\x0B\x64\x24",
+ .klen = 8 + 64 + 8,
+ .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x6f\x54\x20\x6f\x61\x4d\x79\x6e"
+ "\x53\x20\x63\x65\x65\x72\x73\x74"
+ "\x54\x20\x6f\x6f\x4d\x20\x6e\x61"
+ "\x20\x79\x65\x53\x72\x63\x74\x65"
+ "\x20\x73\x6f\x54\x20\x6f\x61\x4d"
+ "\x79\x6e\x53\x20\x63\x65\x65\x72"
+ "\x73\x74\x54\x20\x6f\x6f\x4d\x20"
+ "\x6e\x61\x20\x79\x65\x53\x72\x63"
+ "\x74\x65\x20\x73\x6f\x54\x20\x6f"
+ "\x61\x4d\x79\x6e\x53\x20\x63\x65"
+ "\x65\x72\x73\x74\x54\x20\x6f\x6f"
+ "\x4d\x20\x6e\x61\x20\x79\x65\x53"
+ "\x72\x63\x74\x65\x20\x73\x6f\x54"
+ "\x20\x6f\x61\x4d\x79\x6e\x53\x20"
+ "\x63\x65\x65\x72\x73\x74\x54\x20"
+ "\x6f\x6f\x4d\x20\x6e\x61\x0a\x79",
+ .ilen = 128,
+ .result = "\x70\xd6\xde\x64\x87\x17\xf1\xe8"
+ "\x54\x31\x85\x37\xed\x6b\x01\x8d"
+ "\xe3\xcc\xe0\x1d\x5e\xf3\xfe\xf1"
+ "\x41\xaa\x33\x91\xa7\x7d\x99\x88"
+ "\x4d\x85\x6e\x2f\xa3\x69\xf5\x82"
+ "\x3a\x6f\x25\xcb\x7d\x58\x1f\x9b"
+ "\xaa\x9c\x11\xd5\x76\x67\xce\xde"
+ "\x56\xd7\x5a\x80\x69\xea\x3a\x02"
+ "\xf0\xc7\x7c\xe3\xcb\x40\xe5\x52"
+ "\xd1\x10\x92\x78\x0b\x8e\x5b\xf1"
+ "\xe3\x26\x1f\xe1\x15\x41\xc7\xba"
+ "\x99\xdb\x08\x51\x1c\xd3\x01\xf4"
+ "\x87\x47\x39\xb8\xd2\xdd\xbd\xfb"
+ "\x66\x13\xdf\x1c\x01\x44\xf0\x7a"
+ "\x1a\x6b\x13\xf5\xd5\x0b\xb8\xba"
+ "\x53\xba\xe1\x76\xe3\x82\x07\x86"
+ "\xc6\x2c\x73\x88\xb0\x9d\x5f\x3e"
+ "\x5b\x78\xca\x0e\xab\x8a\xa3\xbb"
+ "\xd9\x1d\xc3\xe3\x05\xac\x76\xfb"
+ "\x58\x83\xda\x67\xfb\x21\x24\xa2"
+ "\xb1\xa7\xd7\x66\xa6\x8d\xa6\x93"
+ "\x97\xe2\xe3\xb8\xaa\x48\x85\xee"
+ "\x8c\xf6\x07\x95\x1f\xa6\x6c\x96"
+ "\x99\xc7\x5c\x8d\xd8\xb5\x68\x7b",
+ .rlen = 128 + 64,
+ },
+};
+
+#define HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC 1
+
+static struct aead_testvec hmac_sha1_des3_ede_cbc_enc_tv_temp[] = {
+ { /* Generated with Crypto++ */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x18" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55"
+ "\xE9\xC0\xFF\x2E\x76\x0B\x64\x24"
+ "\x44\x4D\x99\x5A\x12\xD6\x40\xC0"
+ "\xEA\xC2\x84\xE8\x14\x95\xDB\xE8",
+ .klen = 8 + 20 + 24,
+ .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x6f\x54\x20\x6f\x61\x4d\x79\x6e"
+ "\x53\x20\x63\x65\x65\x72\x73\x74"
+ "\x54\x20\x6f\x6f\x4d\x20\x6e\x61"
+ "\x20\x79\x65\x53\x72\x63\x74\x65"
+ "\x20\x73\x6f\x54\x20\x6f\x61\x4d"
+ "\x79\x6e\x53\x20\x63\x65\x65\x72"
+ "\x73\x74\x54\x20\x6f\x6f\x4d\x20"
+ "\x6e\x61\x20\x79\x65\x53\x72\x63"
+ "\x74\x65\x20\x73\x6f\x54\x20\x6f"
+ "\x61\x4d\x79\x6e\x53\x20\x63\x65"
+ "\x65\x72\x73\x74\x54\x20\x6f\x6f"
+ "\x4d\x20\x6e\x61\x20\x79\x65\x53"
+ "\x72\x63\x74\x65\x20\x73\x6f\x54"
+ "\x20\x6f\x61\x4d\x79\x6e\x53\x20"
+ "\x63\x65\x65\x72\x73\x74\x54\x20"
+ "\x6f\x6f\x4d\x20\x6e\x61\x0a\x79",
+ .ilen = 128,
+ .result = "\x0e\x2d\xb6\x97\x3c\x56\x33\xf4"
+ "\x67\x17\x21\xc7\x6e\x8a\xd5\x49"
+ "\x74\xb3\x49\x05\xc5\x1c\xd0\xed"
+ "\x12\x56\x5c\x53\x96\xb6\x00\x7d"
+ "\x90\x48\xfc\xf5\x8d\x29\x39\xcc"
+ "\x8a\xd5\x35\x18\x36\x23\x4e\xd7"
+ "\x76\xd1\xda\x0c\x94\x67\xbb\x04"
+ "\x8b\xf2\x03\x6c\xa8\xcf\xb6\xea"
+ "\x22\x64\x47\xaa\x8f\x75\x13\xbf"
+ "\x9f\xc2\xc3\xf0\xc9\x56\xc5\x7a"
+ "\x71\x63\x2e\x89\x7b\x1e\x12\xca"
+ "\xe2\x5f\xaf\xd8\xa4\xf8\xc9\x7a"
+ "\xd6\xf9\x21\x31\x62\x44\x45\xa6"
+ "\xd6\xbc\x5a\xd3\x2d\x54\x43\xcc"
+ "\x9d\xde\xa5\x70\xe9\x42\x45\x8a"
+ "\x6b\xfa\xb1\x91\x13\xb0\xd9\x19"
+ "\x67\x6d\xb1\xf5\xb8\x10\xdc\xc6"
+ "\x75\x86\x96\x6b\xb1\xc5\xe4\xcf"
+ "\xd1\x60\x91\xb3",
+ .rlen = 128 + 20,
+ },
+};
+
+#define HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC 1
+
+static struct aead_testvec hmac_sha224_des3_ede_cbc_enc_tv_temp[] = {
+ { /* Generated with Crypto++ */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x18" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xE9\xC0\xFF\x2E\x76\x0B\x64\x24"
+ "\x44\x4D\x99\x5A\x12\xD6\x40\xC0"
+ "\xEA\xC2\x84\xE8\x14\x95\xDB\xE8",
+ .klen = 8 + 24 + 24,
+ .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x6f\x54\x20\x6f\x61\x4d\x79\x6e"
+ "\x53\x20\x63\x65\x65\x72\x73\x74"
+ "\x54\x20\x6f\x6f\x4d\x20\x6e\x61"
+ "\x20\x79\x65\x53\x72\x63\x74\x65"
+ "\x20\x73\x6f\x54\x20\x6f\x61\x4d"
+ "\x79\x6e\x53\x20\x63\x65\x65\x72"
+ "\x73\x74\x54\x20\x6f\x6f\x4d\x20"
+ "\x6e\x61\x20\x79\x65\x53\x72\x63"
+ "\x74\x65\x20\x73\x6f\x54\x20\x6f"
+ "\x61\x4d\x79\x6e\x53\x20\x63\x65"
+ "\x65\x72\x73\x74\x54\x20\x6f\x6f"
+ "\x4d\x20\x6e\x61\x20\x79\x65\x53"
+ "\x72\x63\x74\x65\x20\x73\x6f\x54"
+ "\x20\x6f\x61\x4d\x79\x6e\x53\x20"
+ "\x63\x65\x65\x72\x73\x74\x54\x20"
+ "\x6f\x6f\x4d\x20\x6e\x61\x0a\x79",
+ .ilen = 128,
+ .result = "\x0e\x2d\xb6\x97\x3c\x56\x33\xf4"
+ "\x67\x17\x21\xc7\x6e\x8a\xd5\x49"
+ "\x74\xb3\x49\x05\xc5\x1c\xd0\xed"
+ "\x12\x56\x5c\x53\x96\xb6\x00\x7d"
+ "\x90\x48\xfc\xf5\x8d\x29\x39\xcc"
+ "\x8a\xd5\x35\x18\x36\x23\x4e\xd7"
+ "\x76\xd1\xda\x0c\x94\x67\xbb\x04"
+ "\x8b\xf2\x03\x6c\xa8\xcf\xb6\xea"
+ "\x22\x64\x47\xaa\x8f\x75\x13\xbf"
+ "\x9f\xc2\xc3\xf0\xc9\x56\xc5\x7a"
+ "\x71\x63\x2e\x89\x7b\x1e\x12\xca"
+ "\xe2\x5f\xaf\xd8\xa4\xf8\xc9\x7a"
+ "\xd6\xf9\x21\x31\x62\x44\x45\xa6"
+ "\xd6\xbc\x5a\xd3\x2d\x54\x43\xcc"
+ "\x9d\xde\xa5\x70\xe9\x42\x45\x8a"
+ "\x6b\xfa\xb1\x91\x13\xb0\xd9\x19"
+ "\x15\x24\x7f\x5a\x45\x4a\x66\xce"
+ "\x2b\x0b\x93\x99\x2f\x9d\x0c\x6c"
+ "\x56\x1f\xe1\xa6\x41\xb2\x4c\xd0",
+ .rlen = 128 + 24,
+ },
+};
+
+#define HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC 1
+
+static struct aead_testvec hmac_sha256_des3_ede_cbc_enc_tv_temp[] = {
+ { /* Generated with Crypto++ */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x18" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\xE9\xC0\xFF\x2E\x76\x0B\x64\x24"
+ "\x44\x4D\x99\x5A\x12\xD6\x40\xC0"
+ "\xEA\xC2\x84\xE8\x14\x95\xDB\xE8",
+ .klen = 8 + 32 + 24,
+ .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x6f\x54\x20\x6f\x61\x4d\x79\x6e"
+ "\x53\x20\x63\x65\x65\x72\x73\x74"
+ "\x54\x20\x6f\x6f\x4d\x20\x6e\x61"
+ "\x20\x79\x65\x53\x72\x63\x74\x65"
+ "\x20\x73\x6f\x54\x20\x6f\x61\x4d"
+ "\x79\x6e\x53\x20\x63\x65\x65\x72"
+ "\x73\x74\x54\x20\x6f\x6f\x4d\x20"
+ "\x6e\x61\x20\x79\x65\x53\x72\x63"
+ "\x74\x65\x20\x73\x6f\x54\x20\x6f"
+ "\x61\x4d\x79\x6e\x53\x20\x63\x65"
+ "\x65\x72\x73\x74\x54\x20\x6f\x6f"
+ "\x4d\x20\x6e\x61\x20\x79\x65\x53"
+ "\x72\x63\x74\x65\x20\x73\x6f\x54"
+ "\x20\x6f\x61\x4d\x79\x6e\x53\x20"
+ "\x63\x65\x65\x72\x73\x74\x54\x20"
+ "\x6f\x6f\x4d\x20\x6e\x61\x0a\x79",
+ .ilen = 128,
+ .result = "\x0e\x2d\xb6\x97\x3c\x56\x33\xf4"
+ "\x67\x17\x21\xc7\x6e\x8a\xd5\x49"
+ "\x74\xb3\x49\x05\xc5\x1c\xd0\xed"
+ "\x12\x56\x5c\x53\x96\xb6\x00\x7d"
+ "\x90\x48\xfc\xf5\x8d\x29\x39\xcc"
+ "\x8a\xd5\x35\x18\x36\x23\x4e\xd7"
+ "\x76\xd1\xda\x0c\x94\x67\xbb\x04"
+ "\x8b\xf2\x03\x6c\xa8\xcf\xb6\xea"
+ "\x22\x64\x47\xaa\x8f\x75\x13\xbf"
+ "\x9f\xc2\xc3\xf0\xc9\x56\xc5\x7a"
+ "\x71\x63\x2e\x89\x7b\x1e\x12\xca"
+ "\xe2\x5f\xaf\xd8\xa4\xf8\xc9\x7a"
+ "\xd6\xf9\x21\x31\x62\x44\x45\xa6"
+ "\xd6\xbc\x5a\xd3\x2d\x54\x43\xcc"
+ "\x9d\xde\xa5\x70\xe9\x42\x45\x8a"
+ "\x6b\xfa\xb1\x91\x13\xb0\xd9\x19"
+ "\x73\xb0\xea\x9f\xe8\x18\x80\xd6"
+ "\x56\x38\x44\xc0\xdb\xe3\x4f\x71"
+ "\xf7\xce\xd1\xd3\xf8\xbd\x3e\x4f"
+ "\xca\x43\x95\xdf\x80\x61\x81\xa9",
+ .rlen = 128 + 32,
+ },
+};
+
+#define HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC 1
+
+static struct aead_testvec hmac_sha384_des3_ede_cbc_enc_tv_temp[] = {
+ { /* Generated with Crypto++ */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x18" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x33\x44\x55\x66\x77\x88\x99\xaa"
+ "\xbb\xcc\xdd\xee\xff\x11\x22\x33"
+ "\xE9\xC0\xFF\x2E\x76\x0B\x64\x24"
+ "\x44\x4D\x99\x5A\x12\xD6\x40\xC0"
+ "\xEA\xC2\x84\xE8\x14\x95\xDB\xE8",
+ .klen = 8 + 48 + 24,
+ .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x6f\x54\x20\x6f\x61\x4d\x79\x6e"
+ "\x53\x20\x63\x65\x65\x72\x73\x74"
+ "\x54\x20\x6f\x6f\x4d\x20\x6e\x61"
+ "\x20\x79\x65\x53\x72\x63\x74\x65"
+ "\x20\x73\x6f\x54\x20\x6f\x61\x4d"
+ "\x79\x6e\x53\x20\x63\x65\x65\x72"
+ "\x73\x74\x54\x20\x6f\x6f\x4d\x20"
+ "\x6e\x61\x20\x79\x65\x53\x72\x63"
+ "\x74\x65\x20\x73\x6f\x54\x20\x6f"
+ "\x61\x4d\x79\x6e\x53\x20\x63\x65"
+ "\x65\x72\x73\x74\x54\x20\x6f\x6f"
+ "\x4d\x20\x6e\x61\x20\x79\x65\x53"
+ "\x72\x63\x74\x65\x20\x73\x6f\x54"
+ "\x20\x6f\x61\x4d\x79\x6e\x53\x20"
+ "\x63\x65\x65\x72\x73\x74\x54\x20"
+ "\x6f\x6f\x4d\x20\x6e\x61\x0a\x79",
+ .ilen = 128,
+ .result = "\x0e\x2d\xb6\x97\x3c\x56\x33\xf4"
+ "\x67\x17\x21\xc7\x6e\x8a\xd5\x49"
+ "\x74\xb3\x49\x05\xc5\x1c\xd0\xed"
+ "\x12\x56\x5c\x53\x96\xb6\x00\x7d"
+ "\x90\x48\xfc\xf5\x8d\x29\x39\xcc"
+ "\x8a\xd5\x35\x18\x36\x23\x4e\xd7"
+ "\x76\xd1\xda\x0c\x94\x67\xbb\x04"
+ "\x8b\xf2\x03\x6c\xa8\xcf\xb6\xea"
+ "\x22\x64\x47\xaa\x8f\x75\x13\xbf"
+ "\x9f\xc2\xc3\xf0\xc9\x56\xc5\x7a"
+ "\x71\x63\x2e\x89\x7b\x1e\x12\xca"
+ "\xe2\x5f\xaf\xd8\xa4\xf8\xc9\x7a"
+ "\xd6\xf9\x21\x31\x62\x44\x45\xa6"
+ "\xd6\xbc\x5a\xd3\x2d\x54\x43\xcc"
+ "\x9d\xde\xa5\x70\xe9\x42\x45\x8a"
+ "\x6b\xfa\xb1\x91\x13\xb0\xd9\x19"
+ "\x6d\x77\xfc\x80\x9d\x8a\x9c\xb7"
+ "\x70\xe7\x93\xbf\x73\xe6\x9f\x83"
+ "\x99\x62\x23\xe6\x5b\xd0\xda\x18"
+ "\xa4\x32\x8a\x0b\x46\xd7\xf0\x39"
+ "\x36\x5d\x13\x2f\x86\x10\x78\xd6"
+ "\xd6\xbe\x5c\xb9\x15\x89\xf9\x1b",
+ .rlen = 128 + 48,
+ },
+};
+
+#define HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC 1
+
+static struct aead_testvec hmac_sha512_des3_ede_cbc_enc_tv_temp[] = {
+ { /* Generated with Crypto++ */
+#ifdef __LITTLE_ENDIAN
+ .key = "\x08\x00" /* rta length */
+ "\x01\x00" /* rta type */
+#else
+ .key = "\x00\x08" /* rta length */
+ "\x00\x01" /* rta type */
+#endif
+ "\x00\x00\x00\x18" /* enc key length */
+ "\x11\x22\x33\x44\x55\x66\x77\x88"
+ "\x99\xaa\xbb\xcc\xdd\xee\xff\x11"
+ "\x22\x33\x44\x55\x66\x77\x88\x99"
+ "\xaa\xbb\xcc\xdd\xee\xff\x11\x22"
+ "\x33\x44\x55\x66\x77\x88\x99\xaa"
+ "\xbb\xcc\xdd\xee\xff\x11\x22\x33"
+ "\x44\x55\x66\x77\x88\x99\xaa\xbb"
+ "\xcc\xdd\xee\xff\x11\x22\x33\x44"
+ "\xE9\xC0\xFF\x2E\x76\x0B\x64\x24"
+ "\x44\x4D\x99\x5A\x12\xD6\x40\xC0"
+ "\xEA\xC2\x84\xE8\x14\x95\xDB\xE8",
+ .klen = 8 + 64 + 24,
+ .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x01",
+ .alen = 8,
+ .input = "\x6f\x54\x20\x6f\x61\x4d\x79\x6e"
+ "\x53\x20\x63\x65\x65\x72\x73\x74"
+ "\x54\x20\x6f\x6f\x4d\x20\x6e\x61"
+ "\x20\x79\x65\x53\x72\x63\x74\x65"
+ "\x20\x73\x6f\x54\x20\x6f\x61\x4d"
+ "\x79\x6e\x53\x20\x63\x65\x65\x72"
+ "\x73\x74\x54\x20\x6f\x6f\x4d\x20"
+ "\x6e\x61\x20\x79\x65\x53\x72\x63"
+ "\x74\x65\x20\x73\x6f\x54\x20\x6f"
+ "\x61\x4d\x79\x6e\x53\x20\x63\x65"
+ "\x65\x72\x73\x74\x54\x20\x6f\x6f"
+ "\x4d\x20\x6e\x61\x20\x79\x65\x53"
+ "\x72\x63\x74\x65\x20\x73\x6f\x54"
+ "\x20\x6f\x61\x4d\x79\x6e\x53\x20"
+ "\x63\x65\x65\x72\x73\x74\x54\x20"
+ "\x6f\x6f\x4d\x20\x6e\x61\x0a\x79",
+ .ilen = 128,
+ .result = "\x0e\x2d\xb6\x97\x3c\x56\x33\xf4"
+ "\x67\x17\x21\xc7\x6e\x8a\xd5\x49"
+ "\x74\xb3\x49\x05\xc5\x1c\xd0\xed"
+ "\x12\x56\x5c\x53\x96\xb6\x00\x7d"
+ "\x90\x48\xfc\xf5\x8d\x29\x39\xcc"
+ "\x8a\xd5\x35\x18\x36\x23\x4e\xd7"
+ "\x76\xd1\xda\x0c\x94\x67\xbb\x04"
+ "\x8b\xf2\x03\x6c\xa8\xcf\xb6\xea"
+ "\x22\x64\x47\xaa\x8f\x75\x13\xbf"
+ "\x9f\xc2\xc3\xf0\xc9\x56\xc5\x7a"
+ "\x71\x63\x2e\x89\x7b\x1e\x12\xca"
+ "\xe2\x5f\xaf\xd8\xa4\xf8\xc9\x7a"
+ "\xd6\xf9\x21\x31\x62\x44\x45\xa6"
+ "\xd6\xbc\x5a\xd3\x2d\x54\x43\xcc"
+ "\x9d\xde\xa5\x70\xe9\x42\x45\x8a"
+ "\x6b\xfa\xb1\x91\x13\xb0\xd9\x19"
+ "\x41\xb5\x1f\xbb\xbd\x4e\xb8\x32"
+ "\x22\x86\x4e\x57\x1b\x2a\xd8\x6e"
+ "\xa9\xfb\xc8\xf3\xbf\x2d\xae\x2b"
+ "\x3b\xbc\x41\xe8\x38\xbb\xf1\x60"
+ "\x4c\x68\xa9\x4e\x8c\x73\xa7\xc0"
+ "\x2a\x74\xd4\x65\x12\xcb\x55\xf2"
+ "\xd5\x02\x6d\xe6\xaf\xc9\x2f\xf2"
+ "\x57\xaa\x85\xf7\xf3\x6a\xcb\xdb",
+ .rlen = 128 + 64,
},
};
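The key blobs in the authenc vectors above are not raw keys: as the inline "rta length" / "rta type" comments note, each starts with a 4-byte rtattr header (length 8, type 1) in host byte order, then a big-endian 32-bit enc key length, then the HMAC key and finally the block cipher key, so klen = 8 + authkeylen + enckeylen. The following user-space sketch is illustrative only (the helper name and stand-alone packing are our assumptions, not part of this patch); it packs a blob with that layout:

#include <stdint.h>
#include <string.h>

/*
 * Illustrative helper (not from the patch): pack an authenc()-style key
 * blob laid out like the .key fields above: a host-endian rtattr header
 * (length 8, type 1), a big-endian 32-bit enc key length, the HMAC key,
 * then the cipher key.  Returns the total length (the vectors' klen).
 */
static size_t pack_authenc_key(uint8_t *out,
			       const uint8_t *authkey, size_t authkeylen,
			       const uint8_t *enckey, uint32_t enckeylen)
{
	uint16_t rta_len = 8;	/* 4-byte header + 4-byte enckeylen field */
	uint16_t rta_type = 1;	/* authenc key-parameter attribute */
	uint8_t *p = out;

	memcpy(p, &rta_len, 2);		/* host endian, hence the */
	memcpy(p + 2, &rta_type, 2);	/* __LITTLE_ENDIAN ifdef above */
	p += 4;
	*p++ = enckeylen >> 24;		/* enc key length, big endian */
	*p++ = enckeylen >> 16;
	*p++ = enckeylen >> 8;
	*p++ = enckeylen;
	memcpy(p, authkey, authkeylen);	/* e.g. 64-byte HMAC-SHA512 key */
	p += authkeylen;
	memcpy(p, enckey, enckeylen);	/* e.g. 24-byte DES3-EDE key */
	p += enckeylen;

	return (size_t)(p - out);	/* 8 + authkeylen + enckeylen */
}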
@@ -3328,6 +16264,9 @@ static struct cipher_testvec aes_lrw_enc_tv_template[] = {
"\xcd\x7e\x2b\x5d\x43\xea\x42\xe7"
"\x74\x3f\x7d\x58\x88\x75\xde\x3e",
.rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
}
};
@@ -3579,6 +16518,9 @@ static struct cipher_testvec aes_lrw_dec_tv_template[] = {
"\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4"
"\x21\xc4\xc2\x75\x67\x89\x37\x0a",
.rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
}
};
@@ -3776,6 +16718,151 @@ static struct cipher_testvec aes_xts_enc_tv_template[] = {
"\x0a\x28\x2d\xf9\x20\x14\x7b\xea"
"\xbe\x42\x1e\xe5\x31\x9d\x05\x68",
.rlen = 512,
+ }, { /* XTS-AES 10, XTS-AES-256, data unit 512 bytes */
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x62\x49\x77\x57\x24\x70\x93\x69"
+ "\x99\x59\x57\x49\x66\x96\x76\x27"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95"
+ "\x02\x88\x41\x97\x16\x93\x99\x37"
+ "\x51\x05\x82\x09\x74\x94\x45\x92",
+ .klen = 64,
+ .iv = "\xff\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .ilen = 512,
+ .result = "\x1c\x3b\x3a\x10\x2f\x77\x03\x86"
+ "\xe4\x83\x6c\x99\xe3\x70\xcf\x9b"
+ "\xea\x00\x80\x3f\x5e\x48\x23\x57"
+ "\xa4\xae\x12\xd4\x14\xa3\xe6\x3b"
+ "\x5d\x31\xe2\x76\xf8\xfe\x4a\x8d"
+ "\x66\xb3\x17\xf9\xac\x68\x3f\x44"
+ "\x68\x0a\x86\xac\x35\xad\xfc\x33"
+ "\x45\xbe\xfe\xcb\x4b\xb1\x88\xfd"
+ "\x57\x76\x92\x6c\x49\xa3\x09\x5e"
+ "\xb1\x08\xfd\x10\x98\xba\xec\x70"
+ "\xaa\xa6\x69\x99\xa7\x2a\x82\xf2"
+ "\x7d\x84\x8b\x21\xd4\xa7\x41\xb0"
+ "\xc5\xcd\x4d\x5f\xff\x9d\xac\x89"
+ "\xae\xba\x12\x29\x61\xd0\x3a\x75"
+ "\x71\x23\xe9\x87\x0f\x8a\xcf\x10"
+ "\x00\x02\x08\x87\x89\x14\x29\xca"
+ "\x2a\x3e\x7a\x7d\x7d\xf7\xb1\x03"
+ "\x55\x16\x5c\x8b\x9a\x6d\x0a\x7d"
+ "\xe8\xb0\x62\xc4\x50\x0d\xc4\xcd"
+ "\x12\x0c\x0f\x74\x18\xda\xe3\xd0"
+ "\xb5\x78\x1c\x34\x80\x3f\xa7\x54"
+ "\x21\xc7\x90\xdf\xe1\xde\x18\x34"
+ "\xf2\x80\xd7\x66\x7b\x32\x7f\x6c"
+ "\x8c\xd7\x55\x7e\x12\xac\x3a\x0f"
+ "\x93\xec\x05\xc5\x2e\x04\x93\xef"
+ "\x31\xa1\x2d\x3d\x92\x60\xf7\x9a"
+ "\x28\x9d\x6a\x37\x9b\xc7\x0c\x50"
+ "\x84\x14\x73\xd1\xa8\xcc\x81\xec"
+ "\x58\x3e\x96\x45\xe0\x7b\x8d\x96"
+ "\x70\x65\x5b\xa5\xbb\xcf\xec\xc6"
+ "\xdc\x39\x66\x38\x0a\xd8\xfe\xcb"
+ "\x17\xb6\xba\x02\x46\x9a\x02\x0a"
+ "\x84\xe1\x8e\x8f\x84\x25\x20\x70"
+ "\xc1\x3e\x9f\x1f\x28\x9b\xe5\x4f"
+ "\xbc\x48\x14\x57\x77\x8f\x61\x60"
+ "\x15\xe1\x32\x7a\x02\xb1\x40\xf1"
+ "\x50\x5e\xb3\x09\x32\x6d\x68\x37"
+ "\x8f\x83\x74\x59\x5c\x84\x9d\x84"
+ "\xf4\xc3\x33\xec\x44\x23\x88\x51"
+ "\x43\xcb\x47\xbd\x71\xc5\xed\xae"
+ "\x9b\xe6\x9a\x2f\xfe\xce\xb1\xbe"
+ "\xc9\xde\x24\x4f\xbe\x15\x99\x2b"
+ "\x11\xb7\x7c\x04\x0f\x12\xbd\x8f"
+ "\x6a\x97\x5a\x44\xa0\xf9\x0c\x29"
+ "\xa9\xab\xc3\xd4\xd8\x93\x92\x72"
+ "\x84\xc5\x87\x54\xcc\xe2\x94\x52"
+ "\x9f\x86\x14\xdc\xd2\xab\xa9\x91"
+ "\x92\x5f\xed\xc4\xae\x74\xff\xac"
+ "\x6e\x33\x3b\x93\xeb\x4a\xff\x04"
+ "\x79\xda\x9a\x41\x0e\x44\x50\xe0"
+ "\xdd\x7a\xe4\xc6\xe2\x91\x09\x00"
+ "\x57\x5d\xa4\x01\xfc\x07\x05\x9f"
+ "\x64\x5e\x8b\x7e\x9b\xfd\xef\x33"
+ "\x94\x30\x54\xff\x84\x01\x14\x93"
+ "\xc2\x7b\x34\x29\xea\xed\xb4\xed"
+ "\x53\x76\x44\x1a\x77\xed\x43\x85"
+ "\x1a\xd7\x7f\x16\xf5\x41\xdf\xd2"
+ "\x69\xd5\x0d\x6a\x5f\x14\xfb\x0a"
+ "\xab\x1c\xbb\x4c\x15\x50\xbe\x97"
+ "\xf7\xab\x40\x66\x19\x3c\x4c\xaa"
+ "\x77\x3d\xad\x38\x01\x4b\xd2\x09"
+ "\x2f\xa7\x55\xc8\x24\xbb\x5e\x54"
+ "\xc4\xf3\x6f\xfd\xa9\xfc\xea\x70"
+ "\xb9\xc6\xe6\x93\xe1\x48\xc1\x51",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
}
};
@@ -3973,11 +17060,866 @@ static struct cipher_testvec aes_xts_dec_tv_template[] = {
"\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
"\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
.rlen = 512,
+ }, { /* XTS-AES 10, XTS-AES-256, data unit 512 bytes */
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x62\x49\x77\x57\x24\x70\x93\x69"
+ "\x99\x59\x57\x49\x66\x96\x76\x27"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95"
+ "\x02\x88\x41\x97\x16\x93\x99\x37"
+ "\x51\x05\x82\x09\x74\x94\x45\x92",
+ .klen = 64,
+ .iv = "\xff\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x1c\x3b\x3a\x10\x2f\x77\x03\x86"
+ "\xe4\x83\x6c\x99\xe3\x70\xcf\x9b"
+ "\xea\x00\x80\x3f\x5e\x48\x23\x57"
+ "\xa4\xae\x12\xd4\x14\xa3\xe6\x3b"
+ "\x5d\x31\xe2\x76\xf8\xfe\x4a\x8d"
+ "\x66\xb3\x17\xf9\xac\x68\x3f\x44"
+ "\x68\x0a\x86\xac\x35\xad\xfc\x33"
+ "\x45\xbe\xfe\xcb\x4b\xb1\x88\xfd"
+ "\x57\x76\x92\x6c\x49\xa3\x09\x5e"
+ "\xb1\x08\xfd\x10\x98\xba\xec\x70"
+ "\xaa\xa6\x69\x99\xa7\x2a\x82\xf2"
+ "\x7d\x84\x8b\x21\xd4\xa7\x41\xb0"
+ "\xc5\xcd\x4d\x5f\xff\x9d\xac\x89"
+ "\xae\xba\x12\x29\x61\xd0\x3a\x75"
+ "\x71\x23\xe9\x87\x0f\x8a\xcf\x10"
+ "\x00\x02\x08\x87\x89\x14\x29\xca"
+ "\x2a\x3e\x7a\x7d\x7d\xf7\xb1\x03"
+ "\x55\x16\x5c\x8b\x9a\x6d\x0a\x7d"
+ "\xe8\xb0\x62\xc4\x50\x0d\xc4\xcd"
+ "\x12\x0c\x0f\x74\x18\xda\xe3\xd0"
+ "\xb5\x78\x1c\x34\x80\x3f\xa7\x54"
+ "\x21\xc7\x90\xdf\xe1\xde\x18\x34"
+ "\xf2\x80\xd7\x66\x7b\x32\x7f\x6c"
+ "\x8c\xd7\x55\x7e\x12\xac\x3a\x0f"
+ "\x93\xec\x05\xc5\x2e\x04\x93\xef"
+ "\x31\xa1\x2d\x3d\x92\x60\xf7\x9a"
+ "\x28\x9d\x6a\x37\x9b\xc7\x0c\x50"
+ "\x84\x14\x73\xd1\xa8\xcc\x81\xec"
+ "\x58\x3e\x96\x45\xe0\x7b\x8d\x96"
+ "\x70\x65\x5b\xa5\xbb\xcf\xec\xc6"
+ "\xdc\x39\x66\x38\x0a\xd8\xfe\xcb"
+ "\x17\xb6\xba\x02\x46\x9a\x02\x0a"
+ "\x84\xe1\x8e\x8f\x84\x25\x20\x70"
+ "\xc1\x3e\x9f\x1f\x28\x9b\xe5\x4f"
+ "\xbc\x48\x14\x57\x77\x8f\x61\x60"
+ "\x15\xe1\x32\x7a\x02\xb1\x40\xf1"
+ "\x50\x5e\xb3\x09\x32\x6d\x68\x37"
+ "\x8f\x83\x74\x59\x5c\x84\x9d\x84"
+ "\xf4\xc3\x33\xec\x44\x23\x88\x51"
+ "\x43\xcb\x47\xbd\x71\xc5\xed\xae"
+ "\x9b\xe6\x9a\x2f\xfe\xce\xb1\xbe"
+ "\xc9\xde\x24\x4f\xbe\x15\x99\x2b"
+ "\x11\xb7\x7c\x04\x0f\x12\xbd\x8f"
+ "\x6a\x97\x5a\x44\xa0\xf9\x0c\x29"
+ "\xa9\xab\xc3\xd4\xd8\x93\x92\x72"
+ "\x84\xc5\x87\x54\xcc\xe2\x94\x52"
+ "\x9f\x86\x14\xdc\xd2\xab\xa9\x91"
+ "\x92\x5f\xed\xc4\xae\x74\xff\xac"
+ "\x6e\x33\x3b\x93\xeb\x4a\xff\x04"
+ "\x79\xda\x9a\x41\x0e\x44\x50\xe0"
+ "\xdd\x7a\xe4\xc6\xe2\x91\x09\x00"
+ "\x57\x5d\xa4\x01\xfc\x07\x05\x9f"
+ "\x64\x5e\x8b\x7e\x9b\xfd\xef\x33"
+ "\x94\x30\x54\xff\x84\x01\x14\x93"
+ "\xc2\x7b\x34\x29\xea\xed\xb4\xed"
+ "\x53\x76\x44\x1a\x77\xed\x43\x85"
+ "\x1a\xd7\x7f\x16\xf5\x41\xdf\xd2"
+ "\x69\xd5\x0d\x6a\x5f\x14\xfb\x0a"
+ "\xab\x1c\xbb\x4c\x15\x50\xbe\x97"
+ "\xf7\xab\x40\x66\x19\x3c\x4c\xaa"
+ "\x77\x3d\xad\x38\x01\x4b\xd2\x09"
+ "\x2f\xa7\x55\xc8\x24\xbb\x5e\x54"
+ "\xc4\xf3\x6f\xfd\xa9\xfc\xea\x70"
+ "\xb9\xc6\xe6\x93\xe1\x48\xc1\x51",
+ .ilen = 512,
+ .result = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
}
};
static struct cipher_testvec aes_ctr_enc_tv_template[] = {
+ { /* From NIST Special Publication 800-38A, Appendix F.5 */
+ .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6"
+ "\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
+ .klen = 16,
+ .iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .ilen = 64,
+ .result = "\x87\x4d\x61\x91\xb6\x20\xe3\x26"
+ "\x1b\xef\x68\x64\x99\x0d\xb6\xce"
+ "\x98\x06\xf6\x6b\x79\x70\xfd\xff"
+ "\x86\x17\x18\x7b\xb9\xff\xfd\xff"
+ "\x5a\xe4\xdf\x3e\xdb\xd5\xd3\x5e"
+ "\x5b\x4f\x09\x02\x0d\xb0\x3e\xab"
+ "\x1e\x03\x1d\xda\x2f\xbe\x03\xd1"
+ "\x79\x21\x70\xa0\xf3\x00\x9c\xee",
+ .rlen = 64,
+ }, {
+ .key = "\x8e\x73\xb0\xf7\xda\x0e\x64\x52"
+ "\xc8\x10\xf3\x2b\x80\x90\x79\xe5"
+ "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b",
+ .klen = 24,
+ .iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .ilen = 64,
+ .result = "\x1a\xbc\x93\x24\x17\x52\x1c\xa2"
+ "\x4f\x2b\x04\x59\xfe\x7e\x6e\x0b"
+ "\x09\x03\x39\xec\x0a\xa6\xfa\xef"
+ "\xd5\xcc\xc2\xc6\xf4\xce\x8e\x94"
+ "\x1e\x36\xb2\x6b\xd1\xeb\xc6\x70"
+ "\xd1\xbd\x1d\x66\x56\x20\xab\xf7"
+ "\x4f\x78\xa7\xf6\xd2\x98\x09\x58"
+ "\x5a\x97\xda\xec\x58\xc6\xb0\x50",
+ .rlen = 64,
+ }, {
+ .key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe"
+ "\x2b\x73\xae\xf0\x85\x7d\x77\x81"
+ "\x1f\x35\x2c\x07\x3b\x61\x08\xd7"
+ "\x2d\x98\x10\xa3\x09\x14\xdf\xf4",
+ .klen = 32,
+ .iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .ilen = 64,
+ .result = "\x60\x1e\xc3\x13\x77\x57\x89\xa5"
+ "\xb7\xa7\xf5\x04\xbb\xf3\xd2\x28"
+ "\xf4\x43\xe3\xca\x4d\x62\xb5\x9a"
+ "\xca\x84\xe9\x90\xca\xca\xf5\xc5"
+ "\x2b\x09\x30\xda\xa2\x3d\xe9\x4c"
+ "\xe8\x70\x17\xba\x2d\x84\x98\x8d"
+ "\xdf\xc9\xc5\x8d\xb6\x7a\xad\xa6"
+ "\x13\xc2\xdd\x08\x45\x79\x41\xa6",
+ .rlen = 64,
+ }, { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55"
+ "\x0F\x32\x55\x78\x9B\xBE\x78\x9B"
+ "\xBE\xE1\x04\x27\xE1\x04\x27\x4A"
+ "\x6D\x90\x4A\x6D\x90\xB3\xD6\xF9",
+ .klen = 32,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
+ "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB"
+ "\x54\xE0\x49\xB2\x1B\xA7\x10\x79"
+ "\x05\x6E\xD7\x40\xCC\x35\x9E\x07"
+ "\x93\xFC\x65\xF1\x5A\xC3\x2C\xB8"
+ "\x21\x8A\x16\x7F\xE8\x51\xDD\x46"
+ "\xAF\x18\xA4\x0D\x76\x02\x6B\xD4"
+ "\x3D\xC9\x32\x9B\x04\x90\xF9\x62"
+ "\xEE\x57\xC0\x29\xB5\x1E\x87\x13"
+ "\x7C\xE5\x4E\xDA\x43\xAC\x15\xA1"
+ "\x0A\x73\xFF\x68\xD1\x3A\xC6\x2F"
+ "\x98\x01\x8D\xF6\x5F\xEB\x54\xBD"
+ "\x26\xB2\x1B\x84\x10\x79\xE2\x4B"
+ "\xD7\x40\xA9\x12\x9E\x07\x70\xFC"
+ "\x65\xCE\x37\xC3\x2C\x95\x21\x8A"
+ "\xF3\x5C\xE8\x51\xBA\x23\xAF\x18"
+ "\x81\x0D\x76\xDF\x48\xD4\x3D\xA6"
+ "\x0F\x9B\x04\x6D\xF9\x62\xCB\x34"
+ "\xC0\x29\x92\x1E\x87\xF0\x59\xE5"
+ "\x4E\xB7\x20\xAC\x15\x7E\x0A\x73"
+ "\xDC\x45\xD1\x3A\xA3\x0C\x98\x01"
+ "\x6A\xF6\x5F\xC8\x31\xBD\x26\x8F"
+ "\x1B\x84\xED\x56\xE2\x4B\xB4\x1D"
+ "\xA9\x12\x7B\x07\x70\xD9\x42\xCE"
+ "\x37\xA0\x09\x95\xFE\x67\xF3\x5C"
+ "\xC5\x2E\xBA\x23\x8C\x18\x81\xEA"
+ "\x53\xDF\x48\xB1\x1A\xA6\x0F\x78"
+ "\x04\x6D\xD6\x3F\xCB\x34\x9D\x06"
+ "\x92\xFB\x64\xF0\x59\xC2\x2B\xB7"
+ "\x20\x89\x15\x7E\xE7\x50\xDC\x45"
+ "\xAE\x17\xA3\x0C\x75\x01\x6A\xD3"
+ "\x3C\xC8\x31\x9A\x03\x8F\xF8\x61"
+ "\xED\x56\xBF\x28\xB4\x1D\x86\x12",
+ .ilen = 496,
+ .result = "\x04\xF3\xD3\x88\x17\xEF\xDC\xEF"
+ "\x8B\x04\xF8\x3A\x66\x8D\x1A\x53"
+ "\x57\x1F\x4B\x23\xE4\xA0\xAF\xF9"
+ "\x69\x95\x35\x98\x8D\x4D\x8C\xC1"
+ "\xF0\xB2\x7F\x80\xBB\x54\x28\xA2"
+ "\x7A\x1B\x9F\x77\xEC\x0E\x6E\xDE"
+ "\xF0\xEC\xB8\xE4\x20\x62\xEE\xDB"
+ "\x5D\xF5\xDD\xE3\x54\xFC\xDD\xEB"
+ "\x6A\xEE\x65\xA1\x21\xD6\xD7\x81"
+ "\x47\x61\x12\x4D\xC2\x8C\xFA\x78"
+ "\x1F\x28\x02\x01\xC3\xFC\x1F\xEC"
+ "\x0F\x10\x4F\xB3\x12\x45\xC6\x3B"
+ "\x7E\x08\xF9\x5A\xD0\x5D\x73\x2D"
+ "\x58\xA4\xE5\xCB\x1C\xB4\xCE\x74"
+ "\x32\x41\x1F\x31\x9C\x08\xA2\x5D"
+ "\x67\xEB\x72\x1D\xF8\xE7\x70\x54"
+ "\x34\x4B\x31\x69\x84\x66\x96\x44"
+ "\x56\xCC\x1E\xD9\xE6\x13\x6A\xB9"
+ "\x2D\x0A\x05\x45\x2D\x90\xCC\xDF"
+ "\x16\x5C\x5F\x79\x34\x52\x54\xFE"
+ "\xFE\xCD\xAD\x04\x2E\xAD\x86\x06"
+ "\x1F\x37\xE8\x28\xBC\xD3\x8F\x5B"
+ "\x92\x66\x87\x3B\x8A\x0A\x1A\xCC"
+ "\x6E\xAB\x9F\x0B\xFA\x5C\xE6\xFD"
+ "\x3C\x98\x08\x12\xEC\xAA\x9E\x11"
+ "\xCA\xB2\x1F\xCE\x5E\x5B\xB2\x72"
+ "\x9C\xCC\x5D\xC5\xE0\x32\xC0\x56"
+ "\xD5\x45\x16\xD2\xAF\x13\x66\xF7"
+ "\x8C\x67\xAC\x79\xB2\xAF\x56\x27"
+ "\x3F\xCC\xFE\xCB\x1E\xC0\x75\xF1"
+ "\xA7\xC9\xC3\x1D\x8E\xDD\xF9\xD4"
+ "\x42\xC8\x21\x08\x16\xF7\x01\xD7"
+ "\xAC\x8E\x3F\x1D\x56\xC1\x06\xE4"
+ "\x9C\x62\xD6\xA5\x6A\x50\x44\xB3"
+ "\x35\x1C\x82\xB9\x10\xF9\x42\xA1"
+ "\xFC\x74\x9B\x44\x4F\x25\x02\xE3"
+ "\x08\xF5\xD4\x32\x39\x08\x11\xE8"
+ "\xD2\x6B\x50\x53\xD4\x08\xD1\x6B"
+ "\x3A\x4A\x68\x7B\x7C\xCD\x46\x5E"
+ "\x0D\x07\x19\xDB\x67\xD7\x98\x91"
+ "\xD7\x17\x10\x9B\x7B\x8A\x9B\x33"
+ "\xAE\xF3\x00\xA6\xD4\x15\xD9\xEA"
+ "\x85\x99\x22\xE8\x91\x38\x70\x83"
+ "\x93\x01\x24\x6C\xFA\x9A\xB9\x07"
+ "\xEA\x8D\x3B\xD9\x2A\x43\x59\x16"
+ "\x2F\x69\xEE\x84\x36\x44\x76\x98"
+ "\xF3\x04\x2A\x7C\x74\x3D\x29\x2B"
+ "\x0D\xAD\x8F\x44\x82\x9E\x57\x8D"
+ "\xAC\xED\x18\x1F\x50\xA4\xF5\x98"
+ "\x1F\xBD\x92\x91\x1B\x2D\xA6\xD6"
+ "\xD2\xE3\x02\xAA\x92\x3B\xC6\xB3"
+ "\x1B\x39\x72\xD5\x26\xCA\x04\xE0"
+ "\xFC\x58\x78\xBB\xB1\x3F\xA1\x9C"
+ "\x42\x24\x3E\x2E\x22\xBB\x4B\xBA"
+ "\xF4\x52\x0A\xE6\xAE\x47\xB4\x7D"
+ "\x1D\xA8\xBE\x81\x1A\x75\xDA\xAC"
+ "\xA6\x25\x1E\xEF\x3A\xC0\x6C\x63"
+ "\xEF\xDC\xC9\x79\x10\x26\xE8\x61"
+ "\x29\xFC\xA4\x05\xDF\x7D\x5C\x63"
+ "\x10\x09\x9B\x46\x9B\xF2\x2C\x2B"
+ "\xFA\x3A\x05\x4C\xFA\xD1\xFF\xFE"
+ "\xF1\x4C\xE5\xB2\x91\x64\x0C\x51",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ }, { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55"
+ "\x0F\x32\x55\x78\x9B\xBE\x78\x9B"
+ "\xBE\xE1\x04\x27\xE1\x04\x27\x4A"
+ "\x6D\x90\x4A\x6D\x90\xB3\xD6\xF9",
+ .klen = 32,
+ .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47"
+ "\xE2\x7D\x18\xD6\x71\x0C\xA7\x42",
+ .input = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB"
+ "\x54\xE0\x49\xB2\x1B\xA7\x10\x79"
+ "\x05\x6E\xD7\x40\xCC\x35\x9E\x07"
+ "\x93\xFC\x65\xF1\x5A\xC3\x2C\xB8"
+ "\x21\x8A\x16\x7F\xE8\x51\xDD\x46"
+ "\xAF\x18\xA4\x0D\x76\x02\x6B\xD4"
+ "\x3D\xC9\x32\x9B\x04\x90\xF9\x62"
+ "\xEE\x57\xC0\x29\xB5\x1E\x87\x13"
+ "\x7C\xE5\x4E\xDA\x43\xAC\x15\xA1"
+ "\x0A\x73\xFF\x68\xD1\x3A\xC6\x2F"
+ "\x98\x01\x8D\xF6\x5F\xEB\x54\xBD"
+ "\x26\xB2\x1B\x84\x10\x79\xE2\x4B"
+ "\xD7\x40\xA9\x12\x9E\x07\x70\xFC"
+ "\x65\xCE\x37\xC3\x2C\x95\x21\x8A"
+ "\xF3\x5C\xE8\x51\xBA\x23\xAF\x18"
+ "\x81\x0D\x76\xDF\x48\xD4\x3D\xA6"
+ "\x0F\x9B\x04\x6D\xF9\x62\xCB\x34"
+ "\xC0\x29\x92\x1E\x87\xF0\x59\xE5"
+ "\x4E\xB7\x20\xAC\x15\x7E\x0A\x73"
+ "\xDC\x45\xD1\x3A\xA3\x0C\x98\x01"
+ "\x6A\xF6\x5F\xC8\x31\xBD\x26\x8F"
+ "\x1B\x84\xED\x56\xE2\x4B\xB4\x1D"
+ "\xA9\x12\x7B\x07\x70\xD9\x42\xCE"
+ "\x37\xA0\x09\x95\xFE\x67\xF3\x5C"
+ "\xC5\x2E\xBA\x23\x8C\x18\x81\xEA"
+ "\x53\xDF\x48\xB1\x1A\xA6\x0F\x78"
+ "\x04\x6D\xD6\x3F\xCB\x34\x9D\x06"
+ "\x92\xFB\x64\xF0\x59\xC2\x2B\xB7"
+ "\x20\x89\x15\x7E\xE7\x50\xDC\x45"
+ "\xAE\x17\xA3\x0C\x75\x01\x6A\xD3"
+ "\x3C\xC8\x31\x9A\x03\x8F\xF8\x61"
+ "\xED\x56\xBF\x28\xB4\x1D\x86\x12"
+ "\x7B\xE4\x4D",
+ .ilen = 499,
+ .result = "\xDA\x4E\x3F\xBC\xE8\xB6\x3A\xA2"
+ "\xD5\x4D\x84\x4A\xA9\x0C\xE1\xA5"
+ "\xB8\x73\xBC\xF9\xBB\x59\x2F\x44"
+ "\x8B\xAB\x82\x6C\xB4\x32\x9A\xDE"
+ "\x5A\x0B\xDB\x7A\x6B\xF2\x38\x9F"
+ "\x06\xF7\xF7\xFF\xFF\xC0\x8A\x2E"
+ "\x76\xEA\x06\x32\x23\xF3\x59\x2E"
+ "\x75\xDE\x71\x86\x3C\x98\x23\x44"
+ "\x5B\xF2\xFA\x6A\x00\xBB\xC1\xAD"
+ "\x58\xBD\x3E\x6F\x2E\xB4\x19\x04"
+ "\x70\x8B\x92\x55\x23\xE9\x6A\x3A"
+ "\x78\x7A\x1B\x10\x85\x52\x9C\x12"
+ "\xE4\x55\x81\x21\xCE\x53\xD0\x3B"
+ "\x63\x77\x2C\x74\xD1\xF5\x60\xF3"
+ "\xA1\xDE\x44\x3C\x8F\x4D\x2F\xDD"
+ "\x8A\xFE\x3C\x42\x8E\xD3\xF2\x8E"
+ "\xA8\x28\x69\x65\x31\xE1\x45\x83"
+ "\xE4\x49\xC4\x9C\xA7\x28\xAA\x21"
+ "\xCD\x5D\x0F\x15\xB7\x93\x07\x26"
+ "\xB0\x65\x6D\x91\x90\x23\x7A\xC6"
+ "\xDB\x68\xB0\xA1\x8E\xA4\x76\x4E"
+ "\xC6\x91\x83\x20\x92\x4D\x63\x7A"
+ "\x45\x18\x18\x74\x19\xAD\x71\x01"
+ "\x6B\x23\xAD\x9D\x4E\xE4\x6E\x46"
+ "\xC9\x73\x7A\xF9\x02\x95\xF4\x07"
+ "\x0E\x7A\xA6\xC5\xAE\xFA\x15\x2C"
+ "\x51\x71\xF1\xDC\x22\xB6\xAC\xD8"
+ "\x19\x24\x44\xBC\x0C\xFB\x3C\x2D"
+ "\xB1\x50\x47\x15\x0E\xDB\xB6\xD7"
+ "\xE8\x61\xE5\x95\x52\x1E\x3E\x49"
+ "\x70\xE9\x66\x04\x4C\xE1\xAF\xBD"
+ "\xDD\x15\x3B\x20\x59\x24\xFF\xB0"
+ "\x39\xAA\xE7\xBF\x23\xA3\x6E\xD5"
+ "\x15\xF0\x61\x4F\xAE\x89\x10\x58"
+ "\x5A\x33\x95\x52\x2A\xB5\x77\x9C"
+ "\xA5\x43\x80\x40\x27\x2D\xAE\xD9"
+ "\x3F\xE0\x80\x94\x78\x79\xCB\x7E"
+ "\xAD\x12\x44\x4C\xEC\x27\xB0\xEE"
+ "\x0B\x05\x2A\x82\x99\x58\xBB\x7A"
+ "\x8D\x6D\x9D\x8E\xE2\x8E\xE7\x93"
+ "\x2F\xB3\x09\x8D\x06\xD5\xEE\x70"
+ "\x16\xAE\x35\xC5\x52\x0F\x46\x1F"
+ "\x71\xF9\x5E\xF2\x67\xDC\x98\x2F"
+ "\xA3\x23\xAA\xD5\xD0\x49\xF4\xA6"
+ "\xF6\xB8\x32\xCD\xD6\x85\x73\x60"
+ "\x59\x20\xE7\x55\x0E\x91\xE2\x0C"
+ "\x3F\x1C\xEB\x3D\xDF\x52\x64\xF2"
+ "\x7D\x8B\x5D\x63\x16\xB9\xB2\x5D"
+ "\x5E\xAB\xB2\x97\xAB\x78\x44\xE7"
+ "\xC6\x72\x20\xC5\x90\x9B\xDC\x5D"
+ "\xB0\xEF\x44\xEF\x87\x31\x8D\xF4"
+ "\xFB\x81\x5D\xF7\x96\x96\xD4\x50"
+ "\x89\xA7\xF6\xB9\x67\x76\x40\x9E"
+ "\x9D\x40\xD5\x2C\x30\xB8\x01\x8F"
+ "\xE4\x7B\x71\x48\xA9\xA0\xA0\x1D"
+ "\x87\x52\xA4\x91\xA9\xD7\xA9\x51"
+ "\xD9\x59\xF7\xCC\x63\x22\xC1\x8D"
+ "\x84\x7B\xD8\x22\x32\x5C\x6F\x1D"
+ "\x6E\x9F\xFA\xDD\x49\x40\xDC\x37"
+ "\x14\x8C\xE1\x80\x1B\xDD\x36\x2A"
+ "\xD0\xE9\x54\x99\x5D\xBA\x3B\x11"
+ "\xD8\xFE\xC9\x5B\x5C\x25\xE5\x76"
+ "\xFB\xF2\x3F",
+ .rlen = 499,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 499 - 16, 16 },
+ },
+};
+
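The first three ctr(aes) vectors above are the AES-128/192/256 CTR examples from NIST SP 800-38A, Appendix F.5. As a cross-check outside the kernel (purely illustrative, not part of this patch; it assumes OpenSSL's EVP interface is available), the first block of the 128-bit case can be reproduced like this:

#include <assert.h>
#include <string.h>
#include <openssl/evp.h>

/* Reproduce the first block of the ctr(aes) vector above (illustrative). */
int main(void)
{
	static const unsigned char key[16] =
		"\x2b\x7e\x15\x16\x28\xae\xd2\xa6\xab\xf7\x15\x88\x09\xcf\x4f\x3c";
	static const unsigned char iv[16] =
		"\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff";
	static const unsigned char pt[16] =
		"\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a";
	static const unsigned char expect[16] =
		"\x87\x4d\x61\x91\xb6\x20\xe3\x26\x1b\xef\x68\x64\x99\x0d\xb6\xce";
	unsigned char ct[16];
	int outl = 0, tmpl = 0;
	EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();

	EVP_EncryptInit_ex(ctx, EVP_aes_128_ctr(), NULL, key, iv);
	EVP_EncryptUpdate(ctx, ct, &outl, pt, sizeof(pt));
	EVP_EncryptFinal_ex(ctx, ct + outl, &tmpl);
	EVP_CIPHER_CTX_free(ctx);

	assert(outl + tmpl == 16 && memcmp(ct, expect, 16) == 0);
	return 0;
}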
+static struct cipher_testvec aes_ctr_dec_tv_template[] = {
+ { /* From NIST Special Publication 800-38A, Appendix F.5 */
+ .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6"
+ "\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
+ .klen = 16,
+ .iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .input = "\x87\x4d\x61\x91\xb6\x20\xe3\x26"
+ "\x1b\xef\x68\x64\x99\x0d\xb6\xce"
+ "\x98\x06\xf6\x6b\x79\x70\xfd\xff"
+ "\x86\x17\x18\x7b\xb9\xff\xfd\xff"
+ "\x5a\xe4\xdf\x3e\xdb\xd5\xd3\x5e"
+ "\x5b\x4f\x09\x02\x0d\xb0\x3e\xab"
+ "\x1e\x03\x1d\xda\x2f\xbe\x03\xd1"
+ "\x79\x21\x70\xa0\xf3\x00\x9c\xee",
+ .ilen = 64,
+ .result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .rlen = 64,
+ }, {
+ .key = "\x8e\x73\xb0\xf7\xda\x0e\x64\x52"
+ "\xc8\x10\xf3\x2b\x80\x90\x79\xe5"
+ "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b",
+ .klen = 24,
+ .iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .input = "\x1a\xbc\x93\x24\x17\x52\x1c\xa2"
+ "\x4f\x2b\x04\x59\xfe\x7e\x6e\x0b"
+ "\x09\x03\x39\xec\x0a\xa6\xfa\xef"
+ "\xd5\xcc\xc2\xc6\xf4\xce\x8e\x94"
+ "\x1e\x36\xb2\x6b\xd1\xeb\xc6\x70"
+ "\xd1\xbd\x1d\x66\x56\x20\xab\xf7"
+ "\x4f\x78\xa7\xf6\xd2\x98\x09\x58"
+ "\x5a\x97\xda\xec\x58\xc6\xb0\x50",
+ .ilen = 64,
+ .result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .rlen = 64,
+ }, {
+ .key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe"
+ "\x2b\x73\xae\xf0\x85\x7d\x77\x81"
+ "\x1f\x35\x2c\x07\x3b\x61\x08\xd7"
+ "\x2d\x98\x10\xa3\x09\x14\xdf\xf4",
+ .klen = 32,
+ .iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .input = "\x60\x1e\xc3\x13\x77\x57\x89\xa5"
+ "\xb7\xa7\xf5\x04\xbb\xf3\xd2\x28"
+ "\xf4\x43\xe3\xca\x4d\x62\xb5\x9a"
+ "\xca\x84\xe9\x90\xca\xca\xf5\xc5"
+ "\x2b\x09\x30\xda\xa2\x3d\xe9\x4c"
+ "\xe8\x70\x17\xba\x2d\x84\x98\x8d"
+ "\xdf\xc9\xc5\x8d\xb6\x7a\xad\xa6"
+ "\x13\xc2\xdd\x08\x45\x79\x41\xa6",
+ .ilen = 64,
+ .result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .rlen = 64,
+ }, { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55"
+ "\x0F\x32\x55\x78\x9B\xBE\x78\x9B"
+ "\xBE\xE1\x04\x27\xE1\x04\x27\x4A"
+ "\x6D\x90\x4A\x6D\x90\xB3\xD6\xF9",
+ .klen = 32,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
+ "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x04\xF3\xD3\x88\x17\xEF\xDC\xEF"
+ "\x8B\x04\xF8\x3A\x66\x8D\x1A\x53"
+ "\x57\x1F\x4B\x23\xE4\xA0\xAF\xF9"
+ "\x69\x95\x35\x98\x8D\x4D\x8C\xC1"
+ "\xF0\xB2\x7F\x80\xBB\x54\x28\xA2"
+ "\x7A\x1B\x9F\x77\xEC\x0E\x6E\xDE"
+ "\xF0\xEC\xB8\xE4\x20\x62\xEE\xDB"
+ "\x5D\xF5\xDD\xE3\x54\xFC\xDD\xEB"
+ "\x6A\xEE\x65\xA1\x21\xD6\xD7\x81"
+ "\x47\x61\x12\x4D\xC2\x8C\xFA\x78"
+ "\x1F\x28\x02\x01\xC3\xFC\x1F\xEC"
+ "\x0F\x10\x4F\xB3\x12\x45\xC6\x3B"
+ "\x7E\x08\xF9\x5A\xD0\x5D\x73\x2D"
+ "\x58\xA4\xE5\xCB\x1C\xB4\xCE\x74"
+ "\x32\x41\x1F\x31\x9C\x08\xA2\x5D"
+ "\x67\xEB\x72\x1D\xF8\xE7\x70\x54"
+ "\x34\x4B\x31\x69\x84\x66\x96\x44"
+ "\x56\xCC\x1E\xD9\xE6\x13\x6A\xB9"
+ "\x2D\x0A\x05\x45\x2D\x90\xCC\xDF"
+ "\x16\x5C\x5F\x79\x34\x52\x54\xFE"
+ "\xFE\xCD\xAD\x04\x2E\xAD\x86\x06"
+ "\x1F\x37\xE8\x28\xBC\xD3\x8F\x5B"
+ "\x92\x66\x87\x3B\x8A\x0A\x1A\xCC"
+ "\x6E\xAB\x9F\x0B\xFA\x5C\xE6\xFD"
+ "\x3C\x98\x08\x12\xEC\xAA\x9E\x11"
+ "\xCA\xB2\x1F\xCE\x5E\x5B\xB2\x72"
+ "\x9C\xCC\x5D\xC5\xE0\x32\xC0\x56"
+ "\xD5\x45\x16\xD2\xAF\x13\x66\xF7"
+ "\x8C\x67\xAC\x79\xB2\xAF\x56\x27"
+ "\x3F\xCC\xFE\xCB\x1E\xC0\x75\xF1"
+ "\xA7\xC9\xC3\x1D\x8E\xDD\xF9\xD4"
+ "\x42\xC8\x21\x08\x16\xF7\x01\xD7"
+ "\xAC\x8E\x3F\x1D\x56\xC1\x06\xE4"
+ "\x9C\x62\xD6\xA5\x6A\x50\x44\xB3"
+ "\x35\x1C\x82\xB9\x10\xF9\x42\xA1"
+ "\xFC\x74\x9B\x44\x4F\x25\x02\xE3"
+ "\x08\xF5\xD4\x32\x39\x08\x11\xE8"
+ "\xD2\x6B\x50\x53\xD4\x08\xD1\x6B"
+ "\x3A\x4A\x68\x7B\x7C\xCD\x46\x5E"
+ "\x0D\x07\x19\xDB\x67\xD7\x98\x91"
+ "\xD7\x17\x10\x9B\x7B\x8A\x9B\x33"
+ "\xAE\xF3\x00\xA6\xD4\x15\xD9\xEA"
+ "\x85\x99\x22\xE8\x91\x38\x70\x83"
+ "\x93\x01\x24\x6C\xFA\x9A\xB9\x07"
+ "\xEA\x8D\x3B\xD9\x2A\x43\x59\x16"
+ "\x2F\x69\xEE\x84\x36\x44\x76\x98"
+ "\xF3\x04\x2A\x7C\x74\x3D\x29\x2B"
+ "\x0D\xAD\x8F\x44\x82\x9E\x57\x8D"
+ "\xAC\xED\x18\x1F\x50\xA4\xF5\x98"
+ "\x1F\xBD\x92\x91\x1B\x2D\xA6\xD6"
+ "\xD2\xE3\x02\xAA\x92\x3B\xC6\xB3"
+ "\x1B\x39\x72\xD5\x26\xCA\x04\xE0"
+ "\xFC\x58\x78\xBB\xB1\x3F\xA1\x9C"
+ "\x42\x24\x3E\x2E\x22\xBB\x4B\xBA"
+ "\xF4\x52\x0A\xE6\xAE\x47\xB4\x7D"
+ "\x1D\xA8\xBE\x81\x1A\x75\xDA\xAC"
+ "\xA6\x25\x1E\xEF\x3A\xC0\x6C\x63"
+ "\xEF\xDC\xC9\x79\x10\x26\xE8\x61"
+ "\x29\xFC\xA4\x05\xDF\x7D\x5C\x63"
+ "\x10\x09\x9B\x46\x9B\xF2\x2C\x2B"
+ "\xFA\x3A\x05\x4C\xFA\xD1\xFF\xFE"
+ "\xF1\x4C\xE5\xB2\x91\x64\x0C\x51",
+ .ilen = 496,
+ .result = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB"
+ "\x54\xE0\x49\xB2\x1B\xA7\x10\x79"
+ "\x05\x6E\xD7\x40\xCC\x35\x9E\x07"
+ "\x93\xFC\x65\xF1\x5A\xC3\x2C\xB8"
+ "\x21\x8A\x16\x7F\xE8\x51\xDD\x46"
+ "\xAF\x18\xA4\x0D\x76\x02\x6B\xD4"
+ "\x3D\xC9\x32\x9B\x04\x90\xF9\x62"
+ "\xEE\x57\xC0\x29\xB5\x1E\x87\x13"
+ "\x7C\xE5\x4E\xDA\x43\xAC\x15\xA1"
+ "\x0A\x73\xFF\x68\xD1\x3A\xC6\x2F"
+ "\x98\x01\x8D\xF6\x5F\xEB\x54\xBD"
+ "\x26\xB2\x1B\x84\x10\x79\xE2\x4B"
+ "\xD7\x40\xA9\x12\x9E\x07\x70\xFC"
+ "\x65\xCE\x37\xC3\x2C\x95\x21\x8A"
+ "\xF3\x5C\xE8\x51\xBA\x23\xAF\x18"
+ "\x81\x0D\x76\xDF\x48\xD4\x3D\xA6"
+ "\x0F\x9B\x04\x6D\xF9\x62\xCB\x34"
+ "\xC0\x29\x92\x1E\x87\xF0\x59\xE5"
+ "\x4E\xB7\x20\xAC\x15\x7E\x0A\x73"
+ "\xDC\x45\xD1\x3A\xA3\x0C\x98\x01"
+ "\x6A\xF6\x5F\xC8\x31\xBD\x26\x8F"
+ "\x1B\x84\xED\x56\xE2\x4B\xB4\x1D"
+ "\xA9\x12\x7B\x07\x70\xD9\x42\xCE"
+ "\x37\xA0\x09\x95\xFE\x67\xF3\x5C"
+ "\xC5\x2E\xBA\x23\x8C\x18\x81\xEA"
+ "\x53\xDF\x48\xB1\x1A\xA6\x0F\x78"
+ "\x04\x6D\xD6\x3F\xCB\x34\x9D\x06"
+ "\x92\xFB\x64\xF0\x59\xC2\x2B\xB7"
+ "\x20\x89\x15\x7E\xE7\x50\xDC\x45"
+ "\xAE\x17\xA3\x0C\x75\x01\x6A\xD3"
+ "\x3C\xC8\x31\x9A\x03\x8F\xF8\x61"
+ "\xED\x56\xBF\x28\xB4\x1D\x86\x12",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ }, { /* Generated with Crypto++ */
+ .key = "\xC9\x83\xA6\xC9\xEC\x0F\x32\x55"
+ "\x0F\x32\x55\x78\x9B\xBE\x78\x9B"
+ "\xBE\xE1\x04\x27\xE1\x04\x27\x4A"
+ "\x6D\x90\x4A\x6D\x90\xB3\xD6\xF9",
+ .klen = 32,
+ .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47"
+ "\xE2\x7D\x18\xD6\x71\x0C\xA7\x42",
+ .input = "\xDA\x4E\x3F\xBC\xE8\xB6\x3A\xA2"
+ "\xD5\x4D\x84\x4A\xA9\x0C\xE1\xA5"
+ "\xB8\x73\xBC\xF9\xBB\x59\x2F\x44"
+ "\x8B\xAB\x82\x6C\xB4\x32\x9A\xDE"
+ "\x5A\x0B\xDB\x7A\x6B\xF2\x38\x9F"
+ "\x06\xF7\xF7\xFF\xFF\xC0\x8A\x2E"
+ "\x76\xEA\x06\x32\x23\xF3\x59\x2E"
+ "\x75\xDE\x71\x86\x3C\x98\x23\x44"
+ "\x5B\xF2\xFA\x6A\x00\xBB\xC1\xAD"
+ "\x58\xBD\x3E\x6F\x2E\xB4\x19\x04"
+ "\x70\x8B\x92\x55\x23\xE9\x6A\x3A"
+ "\x78\x7A\x1B\x10\x85\x52\x9C\x12"
+ "\xE4\x55\x81\x21\xCE\x53\xD0\x3B"
+ "\x63\x77\x2C\x74\xD1\xF5\x60\xF3"
+ "\xA1\xDE\x44\x3C\x8F\x4D\x2F\xDD"
+ "\x8A\xFE\x3C\x42\x8E\xD3\xF2\x8E"
+ "\xA8\x28\x69\x65\x31\xE1\x45\x83"
+ "\xE4\x49\xC4\x9C\xA7\x28\xAA\x21"
+ "\xCD\x5D\x0F\x15\xB7\x93\x07\x26"
+ "\xB0\x65\x6D\x91\x90\x23\x7A\xC6"
+ "\xDB\x68\xB0\xA1\x8E\xA4\x76\x4E"
+ "\xC6\x91\x83\x20\x92\x4D\x63\x7A"
+ "\x45\x18\x18\x74\x19\xAD\x71\x01"
+ "\x6B\x23\xAD\x9D\x4E\xE4\x6E\x46"
+ "\xC9\x73\x7A\xF9\x02\x95\xF4\x07"
+ "\x0E\x7A\xA6\xC5\xAE\xFA\x15\x2C"
+ "\x51\x71\xF1\xDC\x22\xB6\xAC\xD8"
+ "\x19\x24\x44\xBC\x0C\xFB\x3C\x2D"
+ "\xB1\x50\x47\x15\x0E\xDB\xB6\xD7"
+ "\xE8\x61\xE5\x95\x52\x1E\x3E\x49"
+ "\x70\xE9\x66\x04\x4C\xE1\xAF\xBD"
+ "\xDD\x15\x3B\x20\x59\x24\xFF\xB0"
+ "\x39\xAA\xE7\xBF\x23\xA3\x6E\xD5"
+ "\x15\xF0\x61\x4F\xAE\x89\x10\x58"
+ "\x5A\x33\x95\x52\x2A\xB5\x77\x9C"
+ "\xA5\x43\x80\x40\x27\x2D\xAE\xD9"
+ "\x3F\xE0\x80\x94\x78\x79\xCB\x7E"
+ "\xAD\x12\x44\x4C\xEC\x27\xB0\xEE"
+ "\x0B\x05\x2A\x82\x99\x58\xBB\x7A"
+ "\x8D\x6D\x9D\x8E\xE2\x8E\xE7\x93"
+ "\x2F\xB3\x09\x8D\x06\xD5\xEE\x70"
+ "\x16\xAE\x35\xC5\x52\x0F\x46\x1F"
+ "\x71\xF9\x5E\xF2\x67\xDC\x98\x2F"
+ "\xA3\x23\xAA\xD5\xD0\x49\xF4\xA6"
+ "\xF6\xB8\x32\xCD\xD6\x85\x73\x60"
+ "\x59\x20\xE7\x55\x0E\x91\xE2\x0C"
+ "\x3F\x1C\xEB\x3D\xDF\x52\x64\xF2"
+ "\x7D\x8B\x5D\x63\x16\xB9\xB2\x5D"
+ "\x5E\xAB\xB2\x97\xAB\x78\x44\xE7"
+ "\xC6\x72\x20\xC5\x90\x9B\xDC\x5D"
+ "\xB0\xEF\x44\xEF\x87\x31\x8D\xF4"
+ "\xFB\x81\x5D\xF7\x96\x96\xD4\x50"
+ "\x89\xA7\xF6\xB9\x67\x76\x40\x9E"
+ "\x9D\x40\xD5\x2C\x30\xB8\x01\x8F"
+ "\xE4\x7B\x71\x48\xA9\xA0\xA0\x1D"
+ "\x87\x52\xA4\x91\xA9\xD7\xA9\x51"
+ "\xD9\x59\xF7\xCC\x63\x22\xC1\x8D"
+ "\x84\x7B\xD8\x22\x32\x5C\x6F\x1D"
+ "\x6E\x9F\xFA\xDD\x49\x40\xDC\x37"
+ "\x14\x8C\xE1\x80\x1B\xDD\x36\x2A"
+ "\xD0\xE9\x54\x99\x5D\xBA\x3B\x11"
+ "\xD8\xFE\xC9\x5B\x5C\x25\xE5\x76"
+ "\xFB\xF2\x3F",
+ .ilen = 499,
+ .result = "\x50\xB9\x22\xAE\x17\x80\x0C\x75"
+ "\xDE\x47\xD3\x3C\xA5\x0E\x9A\x03"
+ "\x6C\xF8\x61\xCA\x33\xBF\x28\x91"
+ "\x1D\x86\xEF\x58\xE4\x4D\xB6\x1F"
+ "\xAB\x14\x7D\x09\x72\xDB\x44\xD0"
+ "\x39\xA2\x0B\x97\x00\x69\xF5\x5E"
+ "\xC7\x30\xBC\x25\x8E\x1A\x83\xEC"
+ "\x55\xE1\x4A\xB3\x1C\xA8\x11\x7A"
+ "\x06\x6F\xD8\x41\xCD\x36\x9F\x08"
+ "\x94\xFD\x66\xF2\x5B\xC4\x2D\xB9"
+ "\x22\x8B\x17\x80\xE9\x52\xDE\x47"
+ "\xB0\x19\xA5\x0E\x77\x03\x6C\xD5"
+ "\x3E\xCA\x33\x9C\x05\x91\xFA\x63"
+ "\xEF\x58\xC1\x2A\xB6\x1F\x88\x14"
+ "\x7D\xE6\x4F\xDB\x44\xAD\x16\xA2"
+ "\x0B\x74\x00\x69\xD2\x3B\xC7\x30"
+ "\x99\x02\x8E\xF7\x60\xEC\x55\xBE"
+ "\x27\xB3\x1C\x85\x11\x7A\xE3\x4C"
+ "\xD8\x41\xAA\x13\x9F\x08\x71\xFD"
+ "\x66\xCF\x38\xC4\x2D\x96\x22\x8B"
+ "\xF4\x5D\xE9\x52\xBB\x24\xB0\x19"
+ "\x82\x0E\x77\xE0\x49\xD5\x3E\xA7"
+ "\x10\x9C\x05\x6E\xFA\x63\xCC\x35"
+ "\xC1\x2A\x93\x1F\x88\xF1\x5A\xE6"
+ "\x4F\xB8\x21\xAD\x16\x7F\x0B\x74"
+ "\xDD\x46\xD2\x3B\xA4\x0D\x99\x02"
+ "\x6B\xF7\x60\xC9\x32\xBE\x27\x90"
+ "\x1C\x85\xEE\x57\xE3\x4C\xB5\x1E"
+ "\xAA\x13\x7C\x08\x71\xDA\x43\xCF"
+ "\x38\xA1\x0A\x96\xFF\x68\xF4\x5D"
+ "\xC6\x2F\xBB\x24\x8D\x19\x82\xEB"
+ "\x54\xE0\x49\xB2\x1B\xA7\x10\x79"
+ "\x05\x6E\xD7\x40\xCC\x35\x9E\x07"
+ "\x93\xFC\x65\xF1\x5A\xC3\x2C\xB8"
+ "\x21\x8A\x16\x7F\xE8\x51\xDD\x46"
+ "\xAF\x18\xA4\x0D\x76\x02\x6B\xD4"
+ "\x3D\xC9\x32\x9B\x04\x90\xF9\x62"
+ "\xEE\x57\xC0\x29\xB5\x1E\x87\x13"
+ "\x7C\xE5\x4E\xDA\x43\xAC\x15\xA1"
+ "\x0A\x73\xFF\x68\xD1\x3A\xC6\x2F"
+ "\x98\x01\x8D\xF6\x5F\xEB\x54\xBD"
+ "\x26\xB2\x1B\x84\x10\x79\xE2\x4B"
+ "\xD7\x40\xA9\x12\x9E\x07\x70\xFC"
+ "\x65\xCE\x37\xC3\x2C\x95\x21\x8A"
+ "\xF3\x5C\xE8\x51\xBA\x23\xAF\x18"
+ "\x81\x0D\x76\xDF\x48\xD4\x3D\xA6"
+ "\x0F\x9B\x04\x6D\xF9\x62\xCB\x34"
+ "\xC0\x29\x92\x1E\x87\xF0\x59\xE5"
+ "\x4E\xB7\x20\xAC\x15\x7E\x0A\x73"
+ "\xDC\x45\xD1\x3A\xA3\x0C\x98\x01"
+ "\x6A\xF6\x5F\xC8\x31\xBD\x26\x8F"
+ "\x1B\x84\xED\x56\xE2\x4B\xB4\x1D"
+ "\xA9\x12\x7B\x07\x70\xD9\x42\xCE"
+ "\x37\xA0\x09\x95\xFE\x67\xF3\x5C"
+ "\xC5\x2E\xBA\x23\x8C\x18\x81\xEA"
+ "\x53\xDF\x48\xB1\x1A\xA6\x0F\x78"
+ "\x04\x6D\xD6\x3F\xCB\x34\x9D\x06"
+ "\x92\xFB\x64\xF0\x59\xC2\x2B\xB7"
+ "\x20\x89\x15\x7E\xE7\x50\xDC\x45"
+ "\xAE\x17\xA3\x0C\x75\x01\x6A\xD3"
+ "\x3C\xC8\x31\x9A\x03\x8F\xF8\x61"
+ "\xED\x56\xBF\x28\xB4\x1D\x86\x12"
+ "\x7B\xE4\x4D",
+ .rlen = 499,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 499 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec aes_ctr_rfc3686_enc_tv_template[] = {
{ /* From RFC 3686 */
.key = "\xae\x68\x52\xf8\x12\x10\x67\xcc"
"\x4b\xf7\xa5\x76\x55\x77\xf3\x9e"
@@ -5109,7 +19051,7 @@ static struct cipher_testvec aes_ctr_enc_tv_template[] = {
},
};
-static struct cipher_testvec aes_ctr_dec_tv_template[] = {
+static struct cipher_testvec aes_ctr_rfc3686_dec_tv_template[] = {
{ /* From RFC 3686 */
.key = "\xae\x68\x52\xf8\x12\x10\x67\xcc"
"\x4b\xf7\xa5\x76\x55\x77\xf3\x9e"
@@ -5200,6 +19142,64 @@ static struct cipher_testvec aes_ctr_dec_tv_template[] = {
},
};
+static struct cipher_testvec aes_ofb_enc_tv_template[] = {
+ /* From NIST Special Publication 800-38A, Appendix F.4 (OFB) */
+ {
+ .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6"
+ "\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
+ .klen = 16,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07\x08"
+ "\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .input = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .ilen = 64,
+ .result = "\x3b\x3f\xd9\x2e\xb7\x2d\xad\x20"
+ "\x33\x34\x49\xf8\xe8\x3c\xfb\x4a"
+ "\x77\x89\x50\x8d\x16\x91\x8f\x03\xf5"
+ "\x3c\x52\xda\xc5\x4e\xd8\x25"
+ "\x97\x40\x05\x1e\x9c\x5f\xec\xf6\x43"
+ "\x44\xf7\xa8\x22\x60\xed\xcc"
+ "\x30\x4c\x65\x28\xf6\x59\xc7\x78"
+ "\x66\xa5\x10\xd9\xc1\xd6\xae\x5e",
+ .rlen = 64,
+ }
+};
+
+static struct cipher_testvec aes_ofb_dec_tv_template[] = {
+ /* From NIST Special Publication 800-38A, Appendix F.4 (OFB) */
+ {
+ .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6"
+ "\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
+ .klen = 16,
+ .iv = "\x00\x01\x02\x03\x04\x05\x06\x07\x08"
+ "\x09\x0a\x0b\x0c\x0d\x0e\x0f",
+ .input = "\x3b\x3f\xd9\x2e\xb7\x2d\xad\x20"
+ "\x33\x34\x49\xf8\xe8\x3c\xfb\x4a"
+ "\x77\x89\x50\x8d\x16\x91\x8f\x03\xf5"
+ "\x3c\x52\xda\xc5\x4e\xd8\x25"
+ "\x97\x40\x05\x1e\x9c\x5f\xec\xf6\x43"
+ "\x44\xf7\xa8\x22\x60\xed\xcc"
+ "\x30\x4c\x65\x28\xf6\x59\xc7\x78"
+ "\x66\xa5\x10\xd9\xc1\xd6\xae\x5e",
+ .ilen = 64,
+ .result = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96"
+ "\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c"
+ "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11"
+ "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17"
+ "\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
+ .rlen = 64,
+ }
+};
+
static struct aead_testvec aes_gcm_enc_tv_template[] = {
{ /* From McGrew & Viega - http://citeseer.ist.psu.edu/656989.html */
.key = zeroed_string,
@@ -5562,6 +19562,443 @@ static struct aead_testvec aes_gcm_dec_tv_template[] = {
}
};
+static struct aead_testvec aes_gcm_rfc4106_enc_tv_template[] = {
+ { /* Generated using Crypto++ */
+ .key = zeroed_string,
+ .klen = 20,
+ .iv = zeroed_string,
+ .input = zeroed_string,
+ .ilen = 16,
+ .assoc = zeroed_string,
+ .alen = 8,
+ .result = "\x03\x88\xDA\xCE\x60\xB6\xA3\x92"
+ "\xF3\x28\xC2\xB9\x71\xB2\xFE\x78"
+ "\x97\xFE\x4C\x23\x37\x42\x01\xE0"
+ "\x81\x9F\x8D\xC5\xD7\x41\xA0\x1B",
+ .rlen = 32,
+ }, {
+ .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c"
+ "\x6d\x6a\x8f\x94\x67\x30\x83\x08"
+ "\x00\x00\x00\x00",
+ .klen = 20,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x01"
+ "\x00\x00\x00\x00",
+ .input = zeroed_string,
+ .ilen = 16,
+ .assoc = zeroed_string,
+ .alen = 8,
+ .result = "\xC0\x0D\x8B\x42\x0F\x8F\x34\x18"
+ "\x88\xB1\xC5\xBC\xC5\xB6\xD6\x28"
+ "\x6A\x9D\xDF\x11\x5E\xFE\x5E\x9D"
+ "\x2F\x70\x44\x92\xF7\xF2\xE3\xEF",
+ .rlen = 32,
+
+ }, {
+ .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c"
+ "\x6d\x6a\x8f\x94\x67\x30\x83\x08"
+ "\x00\x00\x00\x00",
+ .klen = 20,
+ .iv = zeroed_string,
+ .input = "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .ilen = 16,
+ .assoc = zeroed_string,
+ .alen = 8,
+ .result = "\x4B\xB1\xB5\xE3\x25\x71\x70\xDE"
+ "\x7F\xC9\x9C\xA5\x14\x19\xF2\xAC"
+ "\x0B\x8F\x88\x69\x17\xE6\xB4\x3C"
+ "\xB1\x68\xFD\x14\x52\x64\x61\xB2",
+ .rlen = 32,
+ }, {
+ .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c"
+ "\x6d\x6a\x8f\x94\x67\x30\x83\x08"
+ "\x00\x00\x00\x00",
+ .klen = 20,
+ .iv = zeroed_string,
+ .input = "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .ilen = 16,
+ .assoc = "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .alen = 8,
+ .result = "\x4B\xB1\xB5\xE3\x25\x71\x70\xDE"
+ "\x7F\xC9\x9C\xA5\x14\x19\xF2\xAC"
+ "\x90\x92\xB7\xE3\x5F\xA3\x9A\x63"
+ "\x7E\xD7\x1F\xD8\xD3\x7C\x4B\xF5",
+ .rlen = 32,
+ }, {
+ .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c"
+ "\x6d\x6a\x8f\x94\x67\x30\x83\x08"
+ "\x00\x00\x00\x00",
+ .klen = 20,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x01"
+ "\x00\x00\x00\x00",
+ .input = "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .ilen = 16,
+ .assoc = "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .alen = 8,
+ .result = "\xC1\x0C\x8A\x43\x0E\x8E\x35\x19"
+ "\x89\xB0\xC4\xBD\xC4\xB7\xD7\x29"
+ "\x64\x50\xF9\x32\x13\xFB\x74\x61"
+ "\xF4\xED\x52\xD3\xC5\x10\x55\x3C",
+ .rlen = 32,
+ }, {
+ .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c"
+ "\x6d\x6a\x8f\x94\x67\x30\x83\x08"
+ "\x00\x00\x00\x00",
+ .klen = 20,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x01"
+ "\x00\x00\x00\x00",
+ .input = "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .ilen = 64,
+ .assoc = "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .alen = 8,
+ .result = "\xC1\x0C\x8A\x43\x0E\x8E\x35\x19"
+ "\x89\xB0\xC4\xBD\xC4\xB7\xD7\x29"
+ "\x98\x14\xA1\x42\x37\x80\xFD\x90"
+ "\x68\x12\x01\xA8\x91\x89\xB9\x83"
+ "\x5B\x11\x77\x12\x9B\xFF\x24\x89"
+ "\x94\x5F\x18\x12\xBA\x27\x09\x39"
+ "\x99\x96\x76\x42\x15\x1C\xCD\xCB"
+ "\xDC\xD3\xDA\x65\x73\xAF\x80\xCD"
+ "\xD2\xB6\xC2\x4A\x76\xC2\x92\x85"
+ "\xBD\xCF\x62\x98\x58\x14\xE5\xBD",
+ .rlen = 80,
+ }, {
+ .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x00\x00\x00\x00",
+ .klen = 20,
+ .iv = "\x00\x00\x45\x67\x89\xab\xcd\xef"
+ "\x00\x00\x00\x00",
+ .input = "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff",
+ .ilen = 192,
+ .assoc = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa"
+ "\xaa\xaa\xaa\xaa",
+ .alen = 12,
+ .result = "\xC1\x76\x33\x85\xE2\x9B\x5F\xDE"
+ "\xDE\x89\x3D\x42\xE7\xC9\x69\x8A"
+ "\x44\x6D\xC3\x88\x46\x2E\xC2\x01"
+ "\x5E\xF6\x0C\x39\xF0\xC4\xA5\x82"
+ "\xCD\xE8\x31\xCC\x0A\x4C\xE4\x44"
+ "\x41\xA9\x82\x6F\x22\xA1\x23\x1A"
+ "\xA8\xE3\x16\xFD\x31\x5C\x27\x31"
+ "\xF1\x7F\x01\x63\xA3\xAF\x70\xA1"
+ "\xCF\x07\x57\x41\x67\xD0\xC4\x42"
+ "\xDB\x18\xC6\x4C\x4C\xE0\x3D\x9F"
+ "\x05\x07\xFB\x13\x7D\x4A\xCA\x5B"
+ "\xF0\xBF\x64\x7E\x05\xB1\x72\xEE"
+ "\x7C\x3B\xD4\xCD\x14\x03\xB2\x2C"
+ "\xD3\xA9\xEE\xFA\x17\xFC\x9C\xDF"
+ "\xC7\x75\x40\xFF\xAE\xAD\x1E\x59"
+ "\x2F\x30\x24\xFB\xAD\x6B\x10\xFA"
+ "\x6C\x9F\x5B\xE7\x25\xD5\xD0\x25"
+ "\xAC\x4A\x4B\xDA\xFC\x7A\x85\x1B"
+ "\x7E\x13\x06\x82\x08\x17\xA4\x35"
+ "\xEC\xC5\x8D\x63\x96\x81\x0A\x8F"
+ "\xA3\x05\x38\x95\x20\x1A\x47\x04"
+ "\x6F\x6D\xDA\x8F\xEF\xC1\x76\x35"
+ "\x6B\xC7\x4D\x0F\x94\x12\xCA\x3E"
+ "\x2E\xD5\x03\x2E\x86\x7E\xAA\x3B"
+ "\x37\x08\x1C\xCF\xBA\x5D\x71\x46"
+ "\x80\x72\xB0\x4C\x82\x0D\x60\x3C",
+ .rlen = 208,
+ }
+};
+
+static struct aead_testvec aes_gcm_rfc4106_dec_tv_template[] = {
+ { /* Generated using Crypto++ */
+ .key = zeroed_string,
+ .klen = 20,
+ .iv = zeroed_string,
+ .input = "\x03\x88\xDA\xCE\x60\xB6\xA3\x92"
+ "\xF3\x28\xC2\xB9\x71\xB2\xFE\x78"
+ "\x97\xFE\x4C\x23\x37\x42\x01\xE0"
+ "\x81\x9F\x8D\xC5\xD7\x41\xA0\x1B",
+ .ilen = 32,
+ .assoc = zeroed_string,
+ .alen = 8,
+ .result = zeroed_string,
+ .rlen = 16,
+
+ }, {
+ .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c"
+ "\x6d\x6a\x8f\x94\x67\x30\x83\x08"
+ "\x00\x00\x00\x00",
+ .klen = 20,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x01"
+ "\x00\x00\x00\x00",
+ .input = "\xC0\x0D\x8B\x42\x0F\x8F\x34\x18"
+ "\x88\xB1\xC5\xBC\xC5\xB6\xD6\x28"
+ "\x6A\x9D\xDF\x11\x5E\xFE\x5E\x9D"
+ "\x2F\x70\x44\x92\xF7\xF2\xE3\xEF",
+ .ilen = 32,
+ .assoc = zeroed_string,
+ .alen = 8,
+ .result = zeroed_string,
+ .rlen = 16,
+ }, {
+ .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c"
+ "\x6d\x6a\x8f\x94\x67\x30\x83\x08"
+ "\x00\x00\x00\x00",
+ .klen = 20,
+ .iv = zeroed_string,
+ .input = "\x4B\xB1\xB5\xE3\x25\x71\x70\xDE"
+ "\x7F\xC9\x9C\xA5\x14\x19\xF2\xAC"
+ "\x0B\x8F\x88\x69\x17\xE6\xB4\x3C"
+ "\xB1\x68\xFD\x14\x52\x64\x61\xB2",
+ .ilen = 32,
+ .assoc = zeroed_string,
+ .alen = 8,
+ .result = "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .rlen = 16,
+ }, {
+ .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c"
+ "\x6d\x6a\x8f\x94\x67\x30\x83\x08"
+ "\x00\x00\x00\x00",
+ .klen = 20,
+ .iv = zeroed_string,
+ .input = "\x4B\xB1\xB5\xE3\x25\x71\x70\xDE"
+ "\x7F\xC9\x9C\xA5\x14\x19\xF2\xAC"
+ "\x90\x92\xB7\xE3\x5F\xA3\x9A\x63"
+ "\x7E\xD7\x1F\xD8\xD3\x7C\x4B\xF5",
+ .ilen = 32,
+ .assoc = "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .alen = 8,
+ .result = "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .rlen = 16,
+
+ }, {
+ .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c"
+ "\x6d\x6a\x8f\x94\x67\x30\x83\x08"
+ "\x00\x00\x00\x00",
+ .klen = 20,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x01"
+ "\x00\x00\x00\x00",
+ .input = "\xC1\x0C\x8A\x43\x0E\x8E\x35\x19"
+ "\x89\xB0\xC4\xBD\xC4\xB7\xD7\x29"
+ "\x64\x50\xF9\x32\x13\xFB\x74\x61"
+ "\xF4\xED\x52\xD3\xC5\x10\x55\x3C",
+ .ilen = 32,
+ .assoc = "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .alen = 8,
+ .result = "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .rlen = 16,
+ }, {
+ .key = "\xfe\xff\xe9\x92\x86\x65\x73\x1c"
+ "\x6d\x6a\x8f\x94\x67\x30\x83\x08"
+ "\x00\x00\x00\x00",
+ .klen = 20,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x01"
+ "\x00\x00\x00\x00",
+ .input = "\xC1\x0C\x8A\x43\x0E\x8E\x35\x19"
+ "\x89\xB0\xC4\xBD\xC4\xB7\xD7\x29"
+ "\x98\x14\xA1\x42\x37\x80\xFD\x90"
+ "\x68\x12\x01\xA8\x91\x89\xB9\x83"
+ "\x5B\x11\x77\x12\x9B\xFF\x24\x89"
+ "\x94\x5F\x18\x12\xBA\x27\x09\x39"
+ "\x99\x96\x76\x42\x15\x1C\xCD\xCB"
+ "\xDC\xD3\xDA\x65\x73\xAF\x80\xCD"
+ "\xD2\xB6\xC2\x4A\x76\xC2\x92\x85"
+ "\xBD\xCF\x62\x98\x58\x14\xE5\xBD",
+ .ilen = 80,
+ .assoc = "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .alen = 8,
+ .result = "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01"
+ "\x01\x01\x01\x01\x01\x01\x01\x01",
+ .rlen = 64,
+ }, {
+ .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x00\x00\x00\x00",
+ .klen = 20,
+ .iv = "\x00\x00\x45\x67\x89\xab\xcd\xef"
+ "\x00\x00\x00\x00",
+ .input = "\xC1\x76\x33\x85\xE2\x9B\x5F\xDE"
+ "\xDE\x89\x3D\x42\xE7\xC9\x69\x8A"
+ "\x44\x6D\xC3\x88\x46\x2E\xC2\x01"
+ "\x5E\xF6\x0C\x39\xF0\xC4\xA5\x82"
+ "\xCD\xE8\x31\xCC\x0A\x4C\xE4\x44"
+ "\x41\xA9\x82\x6F\x22\xA1\x23\x1A"
+ "\xA8\xE3\x16\xFD\x31\x5C\x27\x31"
+ "\xF1\x7F\x01\x63\xA3\xAF\x70\xA1"
+ "\xCF\x07\x57\x41\x67\xD0\xC4\x42"
+ "\xDB\x18\xC6\x4C\x4C\xE0\x3D\x9F"
+ "\x05\x07\xFB\x13\x7D\x4A\xCA\x5B"
+ "\xF0\xBF\x64\x7E\x05\xB1\x72\xEE"
+ "\x7C\x3B\xD4\xCD\x14\x03\xB2\x2C"
+ "\xD3\xA9\xEE\xFA\x17\xFC\x9C\xDF"
+ "\xC7\x75\x40\xFF\xAE\xAD\x1E\x59"
+ "\x2F\x30\x24\xFB\xAD\x6B\x10\xFA"
+ "\x6C\x9F\x5B\xE7\x25\xD5\xD0\x25"
+ "\xAC\x4A\x4B\xDA\xFC\x7A\x85\x1B"
+ "\x7E\x13\x06\x82\x08\x17\xA4\x35"
+ "\xEC\xC5\x8D\x63\x96\x81\x0A\x8F"
+ "\xA3\x05\x38\x95\x20\x1A\x47\x04"
+ "\x6F\x6D\xDA\x8F\xEF\xC1\x76\x35"
+ "\x6B\xC7\x4D\x0F\x94\x12\xCA\x3E"
+ "\x2E\xD5\x03\x2E\x86\x7E\xAA\x3B"
+ "\x37\x08\x1C\xCF\xBA\x5D\x71\x46"
+ "\x80\x72\xB0\x4C\x82\x0D\x60\x3C",
+ .ilen = 208,
+ .assoc = "\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa"
+ "\xaa\xaa\xaa\xaa",
+ .alen = 12,
+ .result = "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff"
+ "\xff\xff\xff\xff\xff\xff\xff\xff",
+ .rlen = 192,
+
+ }
+};
+
+static struct aead_testvec aes_gcm_rfc4543_enc_tv_template[] = {
+ { /* From draft-mcgrew-gcm-test-01 */
+ .key = "\x4c\x80\xcd\xef\xbb\x5d\x10\xda"
+ "\x90\x6a\xc7\x3c\x36\x13\xa6\x34"
+ "\x22\x43\x3c\x64",
+ .klen = 20,
+ .iv = zeroed_string,
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x07",
+ .alen = 8,
+ .input = "\x45\x00\x00\x30\xda\x3a\x00\x00"
+ "\x80\x01\xdf\x3b\xc0\xa8\x00\x05"
+ "\xc0\xa8\x00\x01\x08\x00\xc6\xcd"
+ "\x02\x00\x07\x00\x61\x62\x63\x64"
+ "\x65\x66\x67\x68\x69\x6a\x6b\x6c"
+ "\x6d\x6e\x6f\x70\x71\x72\x73\x74"
+ "\x01\x02\x02\x01",
+ .ilen = 52,
+ .result = "\x45\x00\x00\x30\xda\x3a\x00\x00"
+ "\x80\x01\xdf\x3b\xc0\xa8\x00\x05"
+ "\xc0\xa8\x00\x01\x08\x00\xc6\xcd"
+ "\x02\x00\x07\x00\x61\x62\x63\x64"
+ "\x65\x66\x67\x68\x69\x6a\x6b\x6c"
+ "\x6d\x6e\x6f\x70\x71\x72\x73\x74"
+ "\x01\x02\x02\x01\xf2\xa9\xa8\x36"
+ "\xe1\x55\x10\x6a\xa8\xdc\xd6\x18"
+ "\xe4\x09\x9a\xaa",
+ .rlen = 68,
+ }
+};
+
+static struct aead_testvec aes_gcm_rfc4543_dec_tv_template[] = {
+ { /* From draft-mcgrew-gcm-test-01 */
+ .key = "\x4c\x80\xcd\xef\xbb\x5d\x10\xda"
+ "\x90\x6a\xc7\x3c\x36\x13\xa6\x34"
+ "\x22\x43\x3c\x64",
+ .klen = 20,
+ .iv = zeroed_string,
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x07",
+ .alen = 8,
+ .input = "\x45\x00\x00\x30\xda\x3a\x00\x00"
+ "\x80\x01\xdf\x3b\xc0\xa8\x00\x05"
+ "\xc0\xa8\x00\x01\x08\x00\xc6\xcd"
+ "\x02\x00\x07\x00\x61\x62\x63\x64"
+ "\x65\x66\x67\x68\x69\x6a\x6b\x6c"
+ "\x6d\x6e\x6f\x70\x71\x72\x73\x74"
+ "\x01\x02\x02\x01\xf2\xa9\xa8\x36"
+ "\xe1\x55\x10\x6a\xa8\xdc\xd6\x18"
+ "\xe4\x09\x9a\xaa",
+ .ilen = 68,
+ .result = "\x45\x00\x00\x30\xda\x3a\x00\x00"
+ "\x80\x01\xdf\x3b\xc0\xa8\x00\x05"
+ "\xc0\xa8\x00\x01\x08\x00\xc6\xcd"
+ "\x02\x00\x07\x00\x61\x62\x63\x64"
+ "\x65\x66\x67\x68\x69\x6a\x6b\x6c"
+ "\x6d\x6e\x6f\x70\x71\x72\x73\x74"
+ "\x01\x02\x02\x01",
+ .rlen = 52,
+ }, { /* same as previous, but with the last four tag bytes zeroed; verification should fail */
+ .key = "\x4c\x80\xcd\xef\xbb\x5d\x10\xda"
+ "\x90\x6a\xc7\x3c\x36\x13\xa6\x34"
+ "\x22\x43\x3c\x64",
+ .klen = 20,
+ .iv = zeroed_string,
+ .assoc = "\x00\x00\x43\x21\x00\x00\x00\x07",
+ .alen = 8,
+ .input = "\x45\x00\x00\x30\xda\x3a\x00\x00"
+ "\x80\x01\xdf\x3b\xc0\xa8\x00\x05"
+ "\xc0\xa8\x00\x01\x08\x00\xc6\xcd"
+ "\x02\x00\x07\x00\x61\x62\x63\x64"
+ "\x65\x66\x67\x68\x69\x6a\x6b\x6c"
+ "\x6d\x6e\x6f\x70\x71\x72\x73\x74"
+ "\x01\x02\x02\x01\xf2\xa9\xa8\x36"
+ "\xe1\x55\x10\x6a\xa8\xdc\xd6\x18"
+ "\x00\x00\x00\x00",
+ .ilen = 68,
+ .novrfy = 1,
+ .result = "\x45\x00\x00\x30\xda\x3a\x00\x00"
+ "\x80\x01\xdf\x3b\xc0\xa8\x00\x05"
+ "\xc0\xa8\x00\x01\x08\x00\xc6\xcd"
+ "\x02\x00\x07\x00\x61\x62\x63\x64"
+ "\x65\x66\x67\x68\x69\x6a\x6b\x6c"
+ "\x6d\x6e\x6f\x70\x71\x72\x73\x74"
+ "\x01\x02\x02\x01",
+ .rlen = 52,
+ },
+};
+
static struct aead_testvec aes_ccm_enc_tv_template[] = {
{ /* From RFC 3610 */
.key = "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
@@ -5688,7 +20125,29 @@ static struct aead_testvec aes_ccm_enc_tv_template[] = {
"\x7c\xf9\xbe\xc2\x40\x88\x97\xc6"
"\xba",
.rlen = 33,
- },
+ }, {
+ /*
+ * This is the same vector as aes_ccm_rfc4309_enc_tv_template[0]
+ * below, but rewritten to use the ccm algorithm directly
+ * (a sketch of the IV mapping follows this array).
+ */
+ .key = "\x83\xac\x54\x66\xc2\xeb\xe5\x05"
+ "\x2e\x01\xd1\xfc\x5d\x82\x66\x2e",
+ .klen = 16,
+ .iv = "\x03\x96\xac\x59\x30\x07\xa1\xe2\xa2\xc7\x55\x24\0\0\0\0",
+ .alen = 0,
+ .input = "\x19\xc8\x81\xf6\xe9\x86\xff\x93"
+ "\x0b\x78\x67\xe5\xbb\xb7\xfc\x6e"
+ "\x83\x77\xb3\xa6\x0c\x8c\x9f\x9c"
+ "\x35\x2e\xad\xe0\x62\xf9\x91\xa1",
+ .ilen = 32,
+ .result = "\xab\x6f\xe1\x69\x1d\x19\x99\xa8"
+ "\x92\xa0\xc4\x6f\x7e\xe2\x8b\xb1"
+ "\x70\xbb\x8c\xa6\x4c\x6e\x97\x8a"
+ "\x57\x2b\xbe\x5d\x98\xa6\xb1\x32"
+ "\xda\x24\xea\xd9\xa1\x39\x98\xfd"
+ "\xa4\xbe\xd9\xf2\x1a\x6d\x22\xa8",
+ .rlen = 48,
+ }
};
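For reference, the last ccm(aes) vector above and aes_ccm_rfc4309_enc_tv_template[0] below are the same test case in two encodings: the rfc4309 key carries a trailing 3-byte salt ("\x96\xac\x59"), and the direct CCM IV is rebuilt from that salt plus the 8-byte per-request IV. A minimal sketch of that IV mapping, with the byte layout read straight off the two vectors (the helper name is ours, not kernel API):

#include <stdint.h>
#include <string.h>

/*
 * Illustrative only: rebuild the 16-byte ccm(aes) IV used above from the
 * rfc4309-style salt (last 3 key bytes) and the 8-byte per-request IV.
 * With an 11-byte nonce the counter field is 4 bytes, so byte 0 is 3.
 */
static void rfc4309_to_ccm_iv(uint8_t ccm_iv[16],
			      const uint8_t salt[3], const uint8_t req_iv[8])
{
	ccm_iv[0] = 3;			/* counter length minus one */
	memcpy(ccm_iv + 1, salt, 3);	/* e.g. "\x96\xac\x59" */
	memcpy(ccm_iv + 4, req_iv, 8);	/* e.g. "\x30\x07\xa1\xe2..." */
	memset(ccm_iv + 12, 0, 4);	/* counter portion starts at zero */
}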
static struct aead_testvec aes_ccm_dec_tv_template[] = {
@@ -5820,9 +20279,477 @@ static struct aead_testvec aes_ccm_dec_tv_template[] = {
},
};
+/*
+ * rfc4309 refers to section 8 of rfc3610 for test vectors, but those all
+ * use a 13-byte nonce, while we only support an 11-byte nonce. Similarly,
+ * NIST Special Publication 800-38C's test vectors also use nonce lengths
+ * our implementation doesn't support. The following were taken from FIPS
+ * CAVS fax files on hand at Red Hat.
+ *
+ * NB: the actual key length is (klen - 3); the last 3 bytes are really
+ * part of the nonce and are combined with the IV, but they need to be
+ * input this way (see the sketch after the encryption vectors below).
+ */
+static struct aead_testvec aes_ccm_rfc4309_enc_tv_template[] = {
+ {
+ .key = "\x83\xac\x54\x66\xc2\xeb\xe5\x05"
+ "\x2e\x01\xd1\xfc\x5d\x82\x66\x2e"
+ "\x96\xac\x59",
+ .klen = 19,
+ .iv = "\x30\x07\xa1\xe2\xa2\xc7\x55\x24",
+ .alen = 0,
+ .input = "\x19\xc8\x81\xf6\xe9\x86\xff\x93"
+ "\x0b\x78\x67\xe5\xbb\xb7\xfc\x6e"
+ "\x83\x77\xb3\xa6\x0c\x8c\x9f\x9c"
+ "\x35\x2e\xad\xe0\x62\xf9\x91\xa1",
+ .ilen = 32,
+ .result = "\xab\x6f\xe1\x69\x1d\x19\x99\xa8"
+ "\x92\xa0\xc4\x6f\x7e\xe2\x8b\xb1"
+ "\x70\xbb\x8c\xa6\x4c\x6e\x97\x8a"
+ "\x57\x2b\xbe\x5d\x98\xa6\xb1\x32"
+ "\xda\x24\xea\xd9\xa1\x39\x98\xfd"
+ "\xa4\xbe\xd9\xf2\x1a\x6d\x22\xa8",
+ .rlen = 48,
+ }, {
+ .key = "\x1e\x2c\x7e\x01\x41\x9a\xef\xc0"
+ "\x0d\x58\x96\x6e\x5c\xa2\x4b\xd3"
+ "\x4f\xa3\x19",
+ .klen = 19,
+ .iv = "\xd3\x01\x5a\xd8\x30\x60\x15\x56",
+ .assoc = "\xda\xe6\x28\x9c\x45\x2d\xfd\x63"
+ "\x5e\xda\x4c\xb6\xe6\xfc\xf9\xb7"
+ "\x0c\x56\xcb\xe4\xe0\x05\x7a\xe1"
+ "\x0a\x63\x09\x78\xbc\x2c\x55\xde",
+ .alen = 32,
+ .input = "\x87\xa3\x36\xfd\x96\xb3\x93\x78"
+ "\xa9\x28\x63\xba\x12\xa3\x14\x85"
+ "\x57\x1e\x06\xc9\x7b\x21\xef\x76"
+ "\x7f\x38\x7e\x8e\x29\xa4\x3e\x7e",
+ .ilen = 32,
+ .result = "\x8a\x1e\x11\xf0\x02\x6b\xe2\x19"
+ "\xfc\x70\xc4\x6d\x8e\xb7\x99\xab"
+ "\xc5\x4b\xa2\xac\xd3\xf3\x48\xff"
+ "\x3b\xb5\xce\x53\xef\xde\xbb\x02"
+ "\xa9\x86\x15\x6c\x13\xfe\xda\x0a"
+ "\x22\xb8\x29\x3d\xd8\x39\x9a\x23",
+ .rlen = 48,
+ }, {
+ .key = "\xf4\x6b\xc2\x75\x62\xfe\xb4\xe1"
+ "\xa3\xf0\xff\xdd\x4e\x4b\x12\x75"
+ "\x53\x14\x73\x66\x8d\x88\xf6\x80"
+ "\xa0\x20\x35",
+ .klen = 27,
+ .iv = "\x26\xf2\x21\x8d\x50\x20\xda\xe2",
+ .assoc = "\x5b\x9e\x13\x67\x02\x5e\xef\xc1"
+ "\x6c\xf9\xd7\x1e\x52\x8f\x7a\x47"
+ "\xe9\xd4\xcf\x20\x14\x6e\xf0\x2d"
+ "\xd8\x9e\x2b\x56\x10\x23\x56\xe7",
+ .alen = 32,
+ .ilen = 0,
+ .result = "\x36\xea\x7a\x70\x08\xdc\x6a\xbc"
+ "\xad\x0c\x7a\x63\xf6\x61\xfd\x9b",
+ .rlen = 16,
+ }, {
+ .key = "\x56\xdf\x5c\x8f\x26\x3f\x0e\x42"
+ "\xef\x7a\xd3\xce\xfc\x84\x60\x62"
+ "\xca\xb4\x40\xaf\x5f\xc9\xc9\x01"
+ "\xd6\x3c\x8c",
+ .klen = 27,
+ .iv = "\x86\x84\xb6\xcd\xef\x09\x2e\x94",
+ .assoc = "\x02\x65\x78\x3c\xe9\x21\x30\x91"
+ "\xb1\xb9\xda\x76\x9a\x78\x6d\x95"
+ "\xf2\x88\x32\xa3\xf2\x50\xcb\x4c"
+ "\xe3\x00\x73\x69\x84\x69\x87\x79",
+ .alen = 32,
+ .input = "\x9f\xd2\x02\x4b\x52\x49\x31\x3c"
+ "\x43\x69\x3a\x2d\x8e\x70\xad\x7e"
+ "\xe0\xe5\x46\x09\x80\x89\x13\xb2"
+ "\x8c\x8b\xd9\x3f\x86\xfb\xb5\x6b",
+ .ilen = 32,
+ .result = "\x39\xdf\x7c\x3c\x5a\x29\xb9\x62"
+ "\x5d\x51\xc2\x16\xd8\xbd\x06\x9f"
+ "\x9b\x6a\x09\x70\xc1\x51\x83\xc2"
+ "\x66\x88\x1d\x4f\x9a\xda\xe0\x1e"
+ "\xc7\x79\x11\x58\xe5\x6b\x20\x40"
+ "\x7a\xea\x46\x42\x8b\xe4\x6f\xe1",
+ .rlen = 48,
+ }, {
+ .key = "\xe0\x8d\x99\x71\x60\xd7\x97\x1a"
+ "\xbd\x01\x99\xd5\x8a\xdf\x71\x3a"
+ "\xd3\xdf\x24\x4b\x5e\x3d\x4b\x4e"
+ "\x30\x7a\xb9\xd8\x53\x0a\x5e\x2b"
+ "\x1e\x29\x91",
+ .klen = 35,
+ .iv = "\xad\x8e\xc1\x53\x0a\xcf\x2d\xbe",
+ .assoc = "\x19\xb6\x1f\x57\xc4\xf3\xf0\x8b"
+ "\x78\x2b\x94\x02\x29\x0f\x42\x27"
+ "\x6b\x75\xcb\x98\x34\x08\x7e\x79"
+ "\xe4\x3e\x49\x0d\x84\x8b\x22\x87",
+ .alen = 32,
+ .input = "\xe1\xd9\xd8\x13\xeb\x3a\x75\x3f"
+ "\x9d\xbd\x5f\x66\xbe\xdc\xbb\x66"
+ "\xbf\x17\x99\x62\x4a\x39\x27\x1f"
+ "\x1d\xdc\x24\xae\x19\x2f\x98\x4c",
+ .ilen = 32,
+ .result = "\x19\xb8\x61\x33\x45\x2b\x43\x96"
+ "\x6f\x51\xd0\x20\x30\x7d\x9b\xc6"
+ "\x26\x3d\xf8\xc9\x65\x16\xa8\x9f"
+ "\xf0\x62\x17\x34\xf2\x1e\x8d\x75"
+ "\x4e\x13\xcc\xc0\xc3\x2a\x54\x2d",
+ .rlen = 40,
+ }, {
+ .key = "\x7c\xc8\x18\x3b\x8d\x99\xe0\x7c"
+ "\x45\x41\xb8\xbd\x5c\xa7\xc2\x32"
+ "\x8a\xb8\x02\x59\xa4\xfe\xa9\x2c"
+ "\x09\x75\x9a\x9b\x3c\x9b\x27\x39"
+ "\xf9\xd9\x4e",
+ .klen = 35,
+ .iv = "\x63\xb5\x3d\x9d\x43\xf6\x1e\x50",
+ .assoc = "\x57\xf5\x6b\x8b\x57\x5c\x3d\x3b"
+ "\x13\x02\x01\x0c\x83\x4c\x96\x35"
+ "\x8e\xd6\x39\xcf\x7d\x14\x9b\x94"
+ "\xb0\x39\x36\xe6\x8f\x57\xe0\x13",
+ .alen = 32,
+ .input = "\x3b\x6c\x29\x36\xb6\xef\x07\xa6"
+ "\x83\x72\x07\x4f\xcf\xfa\x66\x89"
+ "\x5f\xca\xb1\xba\xd5\x8f\x2c\x27"
+ "\x30\xdb\x75\x09\x93\xd4\x65\xe4",
+ .ilen = 32,
+ .result = "\xb0\x88\x5a\x33\xaa\xe5\xc7\x1d"
+ "\x85\x23\xc7\xc6\x2f\xf4\x1e\x3d"
+ "\xcc\x63\x44\x25\x07\x78\x4f\x9e"
+ "\x96\xb8\x88\xeb\xbc\x48\x1f\x06"
+ "\x39\xaf\x39\xac\xd8\x4a\x80\x39"
+ "\x7b\x72\x8a\xf7",
+ .rlen = 44,
+ }, {
+ .key = "\xab\xd0\xe9\x33\x07\x26\xe5\x83"
+ "\x8c\x76\x95\xd4\xb6\xdc\xf3\x46"
+ "\xf9\x8f\xad\xe3\x02\x13\x83\x77"
+ "\x3f\xb0\xf1\xa1\xa1\x22\x0f\x2b"
+ "\x24\xa7\x8b",
+ .klen = 35,
+ .iv = "\x07\xcb\xcc\x0e\xe6\x33\xbf\xf5",
+ .assoc = "\xd4\xdb\x30\x1d\x03\xfe\xfd\x5f"
+ "\x87\xd4\x8c\xb6\xb6\xf1\x7a\x5d"
+ "\xab\x90\x65\x8d\x8e\xca\x4d\x4f"
+ "\x16\x0c\x40\x90\x4b\xc7\x36\x73",
+ .alen = 32,
+ .input = "\xf5\xc6\x7d\x48\xc1\xb7\xe6\x92"
+ "\x97\x5a\xca\xc4\xa9\x6d\xf9\x3d"
+ "\x6c\xde\xbc\xf1\x90\xea\x6a\xb2"
+ "\x35\x86\x36\xaf\x5c\xfe\x4b\x3a",
+ .ilen = 32,
+ .result = "\x83\x6f\x40\x87\x72\xcf\xc1\x13"
+ "\xef\xbb\x80\x21\x04\x6c\x58\x09"
+ "\x07\x1b\xfc\xdf\xc0\x3f\x5b\xc7"
+ "\xe0\x79\xa8\x6e\x71\x7c\x3f\xcf"
+ "\x5c\xda\xb2\x33\xe5\x13\xe2\x0d"
+ "\x74\xd1\xef\xb5\x0f\x3a\xb5\xf8",
+ .rlen = 48,
+ },
+};
+
+static struct aead_testvec aes_ccm_rfc4309_dec_tv_template[] = {
+ {
+ .key = "\xab\x2f\x8a\x74\xb7\x1c\xd2\xb1"
+ "\xff\x80\x2e\x48\x7d\x82\xf8\xb9"
+ "\xc6\xfb\x7d",
+ .klen = 19,
+ .iv = "\x80\x0d\x13\xab\xd8\xa6\xb2\xd8",
+ .alen = 0,
+ .input = "\xd5\xe8\x93\x9f\xc7\x89\x2e\x2b",
+ .ilen = 8,
+ .result = "\x00",
+ .rlen = 0,
+ .novrfy = 1,
+ }, {
+ .key = "\xab\x2f\x8a\x74\xb7\x1c\xd2\xb1"
+ "\xff\x80\x2e\x48\x7d\x82\xf8\xb9"
+ "\xaf\x94\x87",
+ .klen = 19,
+ .iv = "\x78\x35\x82\x81\x7f\x88\x94\x68",
+ .alen = 0,
+ .input = "\x41\x3c\xb8\x87\x73\xcb\xf3\xf3",
+ .ilen = 8,
+ .result = "\x00",
+ .rlen = 0,
+ }, {
+ .key = "\x61\x0e\x8c\xae\xe3\x23\xb6\x38"
+ "\x76\x1c\xf6\x3a\x67\xa3\x9c\xd8"
+ "\xc6\xfb\x7d",
+ .klen = 19,
+ .iv = "\x80\x0d\x13\xab\xd8\xa6\xb2\xd8",
+ .assoc = "\xf3\x94\x87\x78\x35\x82\x81\x7f"
+ "\x88\x94\x68\xb1\x78\x6b\x2b\xd6"
+ "\x04\x1f\x4e\xed\x78\xd5\x33\x66"
+ "\xd8\x94\x99\x91\x81\x54\x62\x57",
+ .alen = 32,
+ .input = "\xf0\x7c\x29\x02\xae\x1c\x2f\x55"
+ "\xd0\xd1\x3d\x1a\xa3\x6d\xe4\x0a"
+ "\x86\xb0\x87\x6b\x62\x33\x8c\x34"
+ "\xce\xab\x57\xcc\x79\x0b\xe0\x6f"
+ "\x5c\x3e\x48\x1f\x6c\x46\xf7\x51"
+ "\x8b\x84\x83\x2a\xc1\x05\xb8\xc5",
+ .ilen = 48,
+ .result = "\x50\x82\x3e\x07\xe2\x1e\xb6\xfb"
+ "\x33\xe4\x73\xce\xd2\xfb\x95\x79"
+ "\xe8\xb4\xb5\x77\x11\x10\x62\x6f"
+ "\x6a\x82\xd1\x13\xec\xf5\xd0\x48",
+ .rlen = 32,
+ .novrfy = 1,
+ }, {
+ .key = "\x61\x0e\x8c\xae\xe3\x23\xb6\x38"
+ "\x76\x1c\xf6\x3a\x67\xa3\x9c\xd8"
+ "\x05\xe0\xc9",
+ .klen = 19,
+ .iv = "\x0f\xed\x34\xea\x97\xd4\x3b\xdf",
+ .assoc = "\x49\x5c\x50\x1f\x1d\x94\xcc\x81"
+ "\xba\xb7\xb6\x03\xaf\xa5\xc1\xa1"
+ "\xd8\x5c\x42\x68\xe0\x6c\xda\x89"
+ "\x05\xac\x56\xac\x1b\x2a\xd3\x86",
+ .alen = 32,
+ .input = "\x39\xbe\x7d\x15\x62\x77\xf3\x3c"
+ "\xad\x83\x52\x6d\x71\x03\x25\x1c"
+ "\xed\x81\x3a\x9a\x16\x7d\x19\x80"
+ "\x72\x04\x72\xd0\xf6\xff\x05\x0f"
+ "\xb7\x14\x30\x00\x32\x9e\xa0\xa6"
+ "\x9e\x5a\x18\xa1\xb8\xfe\xdb\xd3",
+ .ilen = 48,
+ .result = "\x75\x05\xbe\xc2\xd9\x1e\xde\x60"
+ "\x47\x3d\x8c\x7d\xbd\xb5\xd9\xb7"
+ "\xf2\xae\x61\x05\x8f\x82\x24\x3f"
+ "\x9c\x67\x91\xe1\x38\x4f\xe4\x0c",
+ .rlen = 32,
+ }, {
+ .key = "\x39\xbb\xa7\xbe\x59\x97\x9e\x73"
+ "\xa2\xbc\x6b\x98\xd7\x75\x7f\xe3"
+ "\xa4\x48\x93\x39\x26\x71\x4a\xc6"
+ "\xee\x49\x83",
+ .klen = 27,
+ .iv = "\xe9\xa9\xff\xe9\x57\xba\xfd\x9e",
+ .assoc = "\x44\xa6\x2c\x05\xe9\xe1\x43\xb1"
+ "\x58\x7c\xf2\x5c\x6d\x39\x0a\x64"
+ "\xa4\xf0\x13\x05\xd1\x77\x99\x67"
+ "\x11\xc4\xc6\xdb\x00\x56\x36\x61",
+ .alen = 32,
+ .input = "\x71\x99\xfa\xf4\x44\x12\x68\x9b",
+ .ilen = 8,
+ .result = "\x00",
+ .rlen = 0,
+ }, {
+ .key = "\x58\x5d\xa0\x96\x65\x1a\x04\xd7"
+ "\x96\xe5\xc5\x68\xaa\x95\x35\xe0"
+ "\x29\xa0\xba\x9e\x48\x78\xd1\xba"
+ "\xee\x49\x83",
+ .klen = 27,
+ .iv = "\xe9\xa9\xff\xe9\x57\xba\xfd\x9e",
+ .assoc = "\x44\xa6\x2c\x05\xe9\xe1\x43\xb1"
+ "\x58\x7c\xf2\x5c\x6d\x39\x0a\x64"
+ "\xa4\xf0\x13\x05\xd1\x77\x99\x67"
+ "\x11\xc4\xc6\xdb\x00\x56\x36\x61",
+ .alen = 32,
+ .input = "\xfb\xe5\x5d\x34\xbe\xe5\xe8\xe7"
+ "\x5a\xef\x2f\xbf\x1f\x7f\xd4\xb2"
+ "\x66\xca\x61\x1e\x96\x7a\x61\xb3"
+ "\x1c\x16\x45\x52\xba\x04\x9c\x9f"
+ "\xb1\xd2\x40\xbc\x52\x7c\x6f\xb1",
+ .ilen = 40,
+ .result = "\x85\x34\x66\x42\xc8\x92\x0f\x36"
+ "\x58\xe0\x6b\x91\x3c\x98\x5c\xbb"
+ "\x0a\x85\xcc\x02\xad\x7a\x96\xe9"
+ "\x65\x43\xa4\xc3\x0f\xdc\x55\x81",
+ .rlen = 32,
+ }, {
+ .key = "\x58\x5d\xa0\x96\x65\x1a\x04\xd7"
+ "\x96\xe5\xc5\x68\xaa\x95\x35\xe0"
+ "\x29\xa0\xba\x9e\x48\x78\xd1\xba"
+ "\xd1\xfc\x57",
+ .klen = 27,
+ .iv = "\x9c\xfe\xb8\x9c\xad\x71\xaa\x1f",
+ .assoc = "\x86\x67\xa5\xa9\x14\x5f\x0d\xc6"
+ "\xff\x14\xc7\x44\xbf\x6c\x3a\xc3"
+ "\xff\xb6\x81\xbd\xe2\xd5\x06\xc7"
+ "\x3c\xa1\x52\x13\x03\x8a\x23\x3a",
+ .alen = 32,
+ .input = "\x3f\x66\xb0\x9d\xe5\x4b\x38\x00"
+ "\xc6\x0e\x6e\xe5\xd6\x98\xa6\x37"
+ "\x8c\x26\x33\xc6\xb2\xa2\x17\xfa"
+ "\x64\x19\xc0\x30\xd7\xfc\x14\x6b"
+ "\xe3\x33\xc2\x04\xb0\x37\xbe\x3f"
+ "\xa9\xb4\x2d\x68\x03\xa3\x44\xef",
+ .ilen = 48,
+ .result = "\x02\x87\x4d\x28\x80\x6e\xb2\xed"
+ "\x99\x2a\xa8\xca\x04\x25\x45\x90"
+ "\x1d\xdd\x5a\xd9\xe4\xdb\x9c\x9c"
+ "\x49\xe9\x01\xfe\xa7\x80\x6d\x6b",
+ .rlen = 32,
+ .novrfy = 1,
+ }, {
+ .key = "\xa4\x4b\x54\x29\x0a\xb8\x6d\x01"
+ "\x5b\x80\x2a\xcf\x25\xc4\xb7\x5c"
+ "\x20\x2c\xad\x30\xc2\x2b\x41\xfb"
+ "\x0e\x85\xbc\x33\xad\x0f\x2b\xff"
+ "\xee\x49\x83",
+ .klen = 35,
+ .iv = "\xe9\xa9\xff\xe9\x57\xba\xfd\x9e",
+ .alen = 0,
+ .input = "\x1f\xb8\x8f\xa3\xdd\x54\x00\xf2",
+ .ilen = 8,
+ .result = "\x00",
+ .rlen = 0,
+ }, {
+ .key = "\x39\xbb\xa7\xbe\x59\x97\x9e\x73"
+ "\xa2\xbc\x6b\x98\xd7\x75\x7f\xe3"
+ "\xa4\x48\x93\x39\x26\x71\x4a\xc6"
+ "\xae\x8f\x11\x4c\xc2\x9c\x4a\xbb"
+ "\x85\x34\x66",
+ .klen = 35,
+ .iv = "\x42\xc8\x92\x0f\x36\x58\xe0\x6b",
+ .alen = 0,
+ .input = "\x48\x01\x5e\x02\x24\x04\x66\x47"
+ "\xa1\xea\x6f\xaf\xe8\xfc\xfb\xdd"
+ "\xa5\xa9\x87\x8d\x84\xee\x2e\x77"
+ "\xbb\x86\xb9\xf5\x5c\x6c\xff\xf6"
+ "\x72\xc3\x8e\xf7\x70\xb1\xb2\x07"
+ "\xbc\xa8\xa3\xbd\x83\x7c\x1d\x2a",
+ .ilen = 48,
+ .result = "\xdc\x56\xf2\x71\xb0\xb1\xa0\x6c"
+ "\xf0\x97\x3a\xfb\x6d\xe7\x32\x99"
+ "\x3e\xaf\x70\x5e\xb2\x4d\xea\x39"
+ "\x89\xd4\x75\x7a\x63\xb1\xda\x93",
+ .rlen = 32,
+ .novrfy = 1,
+ }, {
+ .key = "\x58\x5d\xa0\x96\x65\x1a\x04\xd7"
+ "\x96\xe5\xc5\x68\xaa\x95\x35\xe0"
+ "\x29\xa0\xba\x9e\x48\x78\xd1\xba"
+ "\x0d\x1a\x53\x3b\xb5\xe3\xf8\x8b"
+ "\xcf\x76\x3f",
+ .klen = 35,
+ .iv = "\xd9\x95\x75\x8f\x44\x89\x40\x7b",
+ .assoc = "\x8f\x86\x6c\x4d\x1d\xc5\x39\x88"
+ "\xc8\xf3\x5c\x52\x10\x63\x6f\x2b"
+ "\x8a\x2a\xc5\x6f\x30\x23\x58\x7b"
+ "\xfb\x36\x03\x11\xb4\xd9\xf2\xfe",
+ .alen = 32,
+ .input = "\x48\x58\xd6\xf3\xad\x63\x58\xbf"
+ "\xae\xc7\x5e\xae\x83\x8f\x7b\xe4"
+ "\x78\x5c\x4c\x67\x71\x89\x94\xbf"
+ "\x47\xf1\x63\x7e\x1c\x59\xbd\xc5"
+ "\x7f\x44\x0a\x0c\x01\x18\x07\x92"
+ "\xe1\xd3\x51\xce\x32\x6d\x0c\x5b",
+ .ilen = 48,
+ .result = "\xc2\x54\xc8\xde\x78\x87\x77\x40"
+ "\x49\x71\xe4\xb7\xe7\xcb\x76\x61"
+ "\x0a\x41\xb9\xe9\xc0\x76\x54\xab"
+ "\x04\x49\x3b\x19\x93\x57\x25\x5d",
+ .rlen = 32,
+ },
+};
+
+/*
+ * ANSI X9.31 Continuous Pseudo-Random Number Generator (AES mode)
+ * test vectors, taken from Appendix B.2.9 and B.2.10:
+ * http://csrc.nist.gov/groups/STM/cavp/documents/rng/RNGVS.pdf
+ * Only AES-128 is supported at this time.
+ */
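+/*
+ * As a sketch of how the fields relate (assuming the standard X9.31 AES
+ * construction): with I = AES_K(DT), each iteration produces
+ *     R = AES_K(I ^ V)   and then   V' = AES_K(R ^ I).
+ * .result is the output block R after .loops iterations (10000 for the
+ * Monte Carlo vector), with V replaced by V' and DT stepped between
+ * iterations.
+ */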
+#define ANSI_CPRNG_AES_TEST_VECTORS 6
+
+static struct cprng_testvec ansi_cprng_aes_tv_template[] = {
+ {
+ .key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42"
+ "\xed\x06\x1c\xab\xb8\xd4\x62\x02",
+ .klen = 16,
+ .dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62"
+ "\xd7\x1d\x4a\xfb\xb0\xe9\x22\xf9",
+ .dtlen = 16,
+ .v = "\x80\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .vlen = 16,
+ .result = "\x59\x53\x1e\xd1\x3b\xb0\xc0\x55"
+ "\x84\x79\x66\x85\xc1\x2f\x76\x41",
+ .rlen = 16,
+ .loops = 1,
+ }, {
+ .key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42"
+ "\xed\x06\x1c\xab\xb8\xd4\x62\x02",
+ .klen = 16,
+ .dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62"
+ "\xd7\x1d\x4a\xfb\xb0\xe9\x22\xfa",
+ .dtlen = 16,
+ .v = "\xc0\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .vlen = 16,
+ .result = "\x7c\x22\x2c\xf4\xca\x8f\xa2\x4c"
+ "\x1c\x9c\xb6\x41\xa9\xf3\x22\x0d",
+ .rlen = 16,
+ .loops = 1,
+ }, {
+ .key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42"
+ "\xed\x06\x1c\xab\xb8\xd4\x62\x02",
+ .klen = 16,
+ .dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62"
+ "\xd7\x1d\x4a\xfb\xb0\xe9\x22\xfb",
+ .dtlen = 16,
+ .v = "\xe0\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .vlen = 16,
+ .result = "\x8a\xaa\x00\x39\x66\x67\x5b\xe5"
+ "\x29\x14\x28\x81\xa9\x4d\x4e\xc7",
+ .rlen = 16,
+ .loops = 1,
+ }, {
+ .key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42"
+ "\xed\x06\x1c\xab\xb8\xd4\x62\x02",
+ .klen = 16,
+ .dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62"
+ "\xd7\x1d\x4a\xfb\xb0\xe9\x22\xfc",
+ .dtlen = 16,
+ .v = "\xf0\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .vlen = 16,
+ .result = "\x88\xdd\xa4\x56\x30\x24\x23\xe5"
+ "\xf6\x9d\xa5\x7e\x7b\x95\xc7\x3a",
+ .rlen = 16,
+ .loops = 1,
+ }, {
+ .key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42"
+ "\xed\x06\x1c\xab\xb8\xd4\x62\x02",
+ .klen = 16,
+ .dt = "\xe6\xb3\xbe\x78\x2a\x23\xfa\x62"
+ "\xd7\x1d\x4a\xfb\xb0\xe9\x22\xfd",
+ .dtlen = 16,
+ .v = "\xf8\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .vlen = 16,
+ .result = "\x05\x25\x92\x46\x61\x79\xd2\xcb"
+ "\x78\xc4\x0b\x14\x0a\x5a\x9a\xc8",
+ .rlen = 16,
+ .loops = 1,
+ }, { /* Monte Carlo Test */
+ .key = "\x9f\x5b\x51\x20\x0b\xf3\x34\xb5"
+ "\xd8\x2b\xe8\xc3\x72\x55\xc8\x48",
+ .klen = 16,
+ .dt = "\x63\x76\xbb\xe5\x29\x02\xba\x3b"
+ "\x67\xc9\x25\xfa\x70\x1f\x11\xac",
+ .dtlen = 16,
+ .v = "\x57\x2c\x8e\x76\x87\x26\x47\x97"
+ "\x7e\x74\xfb\xdd\xc4\x95\x01\xd1",
+ .vlen = 16,
+ .result = "\x48\xe9\xbd\x0d\x06\xee\x18\xfb"
+ "\xe4\x57\x90\xd5\xc3\xfc\x9b\x73",
+ .rlen = 16,
+ .loops = 10000,
+ },
+};
+
/* Cast5 test vectors from RFC 2144 */
-#define CAST5_ENC_TEST_VECTORS 3
-#define CAST5_DEC_TEST_VECTORS 3
+#define CAST5_ENC_TEST_VECTORS 4
+#define CAST5_DEC_TEST_VECTORS 4
+#define CAST5_CBC_ENC_TEST_VECTORS 1
+#define CAST5_CBC_DEC_TEST_VECTORS 1
+#define CAST5_CTR_ENC_TEST_VECTORS 2
+#define CAST5_CTR_DEC_TEST_VECTORS 2
static struct cipher_testvec cast5_enc_tv_template[] = {
{
@@ -5848,6 +20775,140 @@ static struct cipher_testvec cast5_enc_tv_template[] = {
.ilen = 8,
.result = "\x7a\xc8\x16\xd1\x6e\x9b\x30\x2e",
.rlen = 8,
+ }, { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A",
+ .klen = 16,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\x8D\xFC\x81\x9C\xCB\xAA\x5A\x1C"
+ "\x7E\x95\xCF\x40\xAB\x4D\x6F\xEA"
+ "\xD3\xD9\xB0\x9A\xB7\xC7\xE0\x2E"
+ "\xD1\x39\x34\x92\x8F\xFA\x14\xF1"
+ "\xD5\xD2\x7B\x59\x1F\x35\x28\xC2"
+ "\x20\xD9\x42\x06\xC9\x0B\x10\x04"
+ "\xF8\x79\xCD\x32\x86\x75\x4C\xB6"
+ "\x7B\x1C\x52\xB1\x91\x64\x22\x4B"
+ "\x13\xC7\xAE\x98\x0E\xB5\xCF\x6F"
+ "\x3F\xF4\x43\x96\x73\x0D\xA2\x05"
+ "\xDB\xFD\x28\x90\x2C\x56\xB9\x37"
+ "\x5B\x69\x0C\xAD\x84\x67\xFF\x15"
+ "\x4A\xD4\xA7\xD3\xDD\x99\x47\x3A"
+ "\xED\x34\x35\x78\x6B\x91\xC9\x32"
+ "\xE1\xBF\xBC\xB4\x04\x85\x6A\x39"
+ "\xC0\xBA\x51\xD0\x0F\x4E\xD1\xE2"
+ "\x1C\xFD\x0E\x05\x07\xF4\x10\xED"
+ "\xA2\x17\xFF\xF5\x64\xC6\x1A\x22"
+ "\xAD\x78\xE7\xD7\x11\xE9\x99\xB9"
+ "\xAA\xEC\x6F\xF8\x3B\xBF\xCE\x77"
+ "\x93\xE8\xAD\x1D\x50\x6C\xAE\xBC"
+ "\xBA\x5C\x80\xD1\x91\x65\x51\x1B"
+ "\xE8\x0A\xCD\x99\x96\x71\x3D\xB6"
+ "\x78\x75\x37\x55\xC1\xF5\x90\x40"
+ "\x34\xF4\x7E\xC8\xCC\x3A\x5F\x6E"
+ "\x36\xA1\xA1\xC2\x3A\x72\x42\x8E"
+ "\x0E\x37\x88\xE8\xCE\x83\xCB\xAD"
+ "\xE0\x69\x77\x50\xC7\x0C\x99\xCA"
+ "\x19\x5B\x30\x25\x9A\xEF\x9B\x0C"
+ "\xEF\x8F\x74\x4C\xCF\x49\x4E\xB9"
+ "\xC5\xAE\x9E\x2E\x78\x9A\xB9\x48"
+ "\xD5\x81\xE4\x37\x1D\xBF\x27\xD9"
+ "\xC5\xD6\x65\x43\x45\x8C\xBB\xB6"
+ "\x55\xF4\x06\xBB\x49\x53\x8B\x1B"
+ "\x07\xA9\x96\x69\x5B\xCB\x0F\xBC"
+ "\x93\x85\x90\x0F\x0A\x68\x40\x2A"
+ "\x95\xED\x2D\x88\xBF\x71\xD0\xBB"
+ "\xEC\xB0\x77\x6C\x79\xFC\x3C\x05"
+ "\x49\x3F\xB8\x24\xEF\x8E\x09\xA2"
+ "\x1D\xEF\x92\x02\x96\xD4\x7F\xC8"
+ "\x03\xB2\xCA\xDB\x17\x5C\x52\xCF"
+ "\xDD\x70\x37\x63\xAA\xA5\x83\x20"
+ "\x52\x02\xF6\xB9\xE7\x6E\x0A\xB6"
+ "\x79\x03\xA0\xDA\xA3\x79\x21\xBD"
+ "\xE3\x37\x3A\xC0\xF7\x2C\x32\xBE"
+ "\x8B\xE8\xA6\x00\xC7\x32\xD5\x06"
+ "\xBB\xE3\xAB\x06\x21\x82\xB8\x32"
+ "\x31\x34\x2A\xA7\x1F\x64\x99\xBF"
+ "\xFA\xDA\x3D\x75\xF7\x48\xD5\x48"
+ "\x4B\x52\x7E\xF6\x7C\xAB\x67\x59"
+ "\xC5\xDC\xA8\xC6\x63\x85\x4A\xDF"
+ "\xF0\x40\x5F\xCF\xE3\x58\x52\x67"
+ "\x7A\x24\x32\xC5\xEC\x9E\xA9\x6F"
+ "\x58\x56\xDD\x94\x1F\x71\x8D\xF4"
+ "\x6E\xFF\x2C\xA7\xA5\xD8\xBA\xAF"
+ "\x1D\x8B\xA2\x46\xB5\xC4\x9F\x57"
+ "\x8D\xD8\xB3\x3C\x02\x0D\xBB\x84"
+ "\xC7\xBD\xB4\x9A\x6E\xBB\xB1\x37"
+ "\x95\x79\xC4\xA7\xEA\x1D\xDC\x33"
+ "\x5D\x0B\x3F\x03\x8F\x30\xF9\xAE"
+ "\x4F\xFE\x24\x9C\x9A\x02\xE5\x57"
+ "\xF5\xBC\x25\xD6\x02\x56\x57\x1C",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -5875,6 +20936,718 @@ static struct cipher_testvec cast5_dec_tv_template[] = {
.ilen = 8,
.result = "\x01\x23\x45\x67\x89\xab\xcd\xef",
.rlen = 8,
+ }, { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A",
+ .klen = 16,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\x8D\xFC\x81\x9C\xCB\xAA\x5A\x1C"
+ "\x7E\x95\xCF\x40\xAB\x4D\x6F\xEA"
+ "\xD3\xD9\xB0\x9A\xB7\xC7\xE0\x2E"
+ "\xD1\x39\x34\x92\x8F\xFA\x14\xF1"
+ "\xD5\xD2\x7B\x59\x1F\x35\x28\xC2"
+ "\x20\xD9\x42\x06\xC9\x0B\x10\x04"
+ "\xF8\x79\xCD\x32\x86\x75\x4C\xB6"
+ "\x7B\x1C\x52\xB1\x91\x64\x22\x4B"
+ "\x13\xC7\xAE\x98\x0E\xB5\xCF\x6F"
+ "\x3F\xF4\x43\x96\x73\x0D\xA2\x05"
+ "\xDB\xFD\x28\x90\x2C\x56\xB9\x37"
+ "\x5B\x69\x0C\xAD\x84\x67\xFF\x15"
+ "\x4A\xD4\xA7\xD3\xDD\x99\x47\x3A"
+ "\xED\x34\x35\x78\x6B\x91\xC9\x32"
+ "\xE1\xBF\xBC\xB4\x04\x85\x6A\x39"
+ "\xC0\xBA\x51\xD0\x0F\x4E\xD1\xE2"
+ "\x1C\xFD\x0E\x05\x07\xF4\x10\xED"
+ "\xA2\x17\xFF\xF5\x64\xC6\x1A\x22"
+ "\xAD\x78\xE7\xD7\x11\xE9\x99\xB9"
+ "\xAA\xEC\x6F\xF8\x3B\xBF\xCE\x77"
+ "\x93\xE8\xAD\x1D\x50\x6C\xAE\xBC"
+ "\xBA\x5C\x80\xD1\x91\x65\x51\x1B"
+ "\xE8\x0A\xCD\x99\x96\x71\x3D\xB6"
+ "\x78\x75\x37\x55\xC1\xF5\x90\x40"
+ "\x34\xF4\x7E\xC8\xCC\x3A\x5F\x6E"
+ "\x36\xA1\xA1\xC2\x3A\x72\x42\x8E"
+ "\x0E\x37\x88\xE8\xCE\x83\xCB\xAD"
+ "\xE0\x69\x77\x50\xC7\x0C\x99\xCA"
+ "\x19\x5B\x30\x25\x9A\xEF\x9B\x0C"
+ "\xEF\x8F\x74\x4C\xCF\x49\x4E\xB9"
+ "\xC5\xAE\x9E\x2E\x78\x9A\xB9\x48"
+ "\xD5\x81\xE4\x37\x1D\xBF\x27\xD9"
+ "\xC5\xD6\x65\x43\x45\x8C\xBB\xB6"
+ "\x55\xF4\x06\xBB\x49\x53\x8B\x1B"
+ "\x07\xA9\x96\x69\x5B\xCB\x0F\xBC"
+ "\x93\x85\x90\x0F\x0A\x68\x40\x2A"
+ "\x95\xED\x2D\x88\xBF\x71\xD0\xBB"
+ "\xEC\xB0\x77\x6C\x79\xFC\x3C\x05"
+ "\x49\x3F\xB8\x24\xEF\x8E\x09\xA2"
+ "\x1D\xEF\x92\x02\x96\xD4\x7F\xC8"
+ "\x03\xB2\xCA\xDB\x17\x5C\x52\xCF"
+ "\xDD\x70\x37\x63\xAA\xA5\x83\x20"
+ "\x52\x02\xF6\xB9\xE7\x6E\x0A\xB6"
+ "\x79\x03\xA0\xDA\xA3\x79\x21\xBD"
+ "\xE3\x37\x3A\xC0\xF7\x2C\x32\xBE"
+ "\x8B\xE8\xA6\x00\xC7\x32\xD5\x06"
+ "\xBB\xE3\xAB\x06\x21\x82\xB8\x32"
+ "\x31\x34\x2A\xA7\x1F\x64\x99\xBF"
+ "\xFA\xDA\x3D\x75\xF7\x48\xD5\x48"
+ "\x4B\x52\x7E\xF6\x7C\xAB\x67\x59"
+ "\xC5\xDC\xA8\xC6\x63\x85\x4A\xDF"
+ "\xF0\x40\x5F\xCF\xE3\x58\x52\x67"
+ "\x7A\x24\x32\xC5\xEC\x9E\xA9\x6F"
+ "\x58\x56\xDD\x94\x1F\x71\x8D\xF4"
+ "\x6E\xFF\x2C\xA7\xA5\xD8\xBA\xAF"
+ "\x1D\x8B\xA2\x46\xB5\xC4\x9F\x57"
+ "\x8D\xD8\xB3\x3C\x02\x0D\xBB\x84"
+ "\xC7\xBD\xB4\x9A\x6E\xBB\xB1\x37"
+ "\x95\x79\xC4\xA7\xEA\x1D\xDC\x33"
+ "\x5D\x0B\x3F\x03\x8F\x30\xF9\xAE"
+ "\x4F\xFE\x24\x9C\x9A\x02\xE5\x57"
+ "\xF5\xBC\x25\xD6\x02\x56\x57\x1C",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec cast5_cbc_enc_tv_template[] = {
+ { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A",
+ .klen = 16,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\x05\x28\xCE\x61\x90\x80\xE1\x78"
+ "\xB9\x2A\x97\x7C\xB0\x83\xD8\x1A"
+ "\xDE\x58\x7F\xD7\xFD\x72\xB8\xFB"
+ "\xDA\xF0\x6E\x77\x14\x47\x82\xBA"
+ "\x29\x0E\x25\x6E\xB4\x39\xD9\x7F"
+ "\x05\xA7\xA7\x3A\xC1\x5D\x9E\x39"
+ "\xA7\xFB\x0D\x05\x00\xF3\x58\x67"
+ "\x60\xEC\x73\x77\x46\x85\x9B\x6A"
+ "\x08\x3E\xBE\x59\xFB\xE4\x96\x34"
+ "\xB4\x05\x49\x1A\x97\x43\xAD\xA0"
+ "\xA9\x1E\x6E\x74\xF1\x94\xEC\xA8"
+ "\xB5\x8A\x20\xEA\x89\x6B\x19\xAA"
+ "\xA7\xF1\x33\x67\x90\x23\x0D\xEE"
+ "\x81\xD5\x78\x4F\xD3\x63\xEA\x46"
+ "\xB5\xB2\x6E\xBB\xCA\x76\x06\x10"
+ "\x96\x2A\x0A\xBA\xF9\x41\x5A\x1D"
+ "\x36\x7C\x56\x14\x54\x83\xFA\xA1"
+ "\x27\xDD\xBA\x8A\x90\x29\xD6\xA6"
+ "\xFA\x48\x3E\x1E\x23\x6E\x98\xA8"
+ "\xA7\xD9\x67\x92\x5C\x13\xB4\x71"
+ "\xA8\xAA\x89\x4A\xA4\xB3\x49\x7C"
+ "\x7D\x7F\xCE\x6F\x29\x2E\x7E\x37"
+ "\xC8\x52\x60\xD9\xE7\xCA\x60\x98"
+ "\xED\xCD\xE8\x60\x83\xAD\x34\x4D"
+ "\x96\x4A\x99\x2B\xB7\x14\x75\x66"
+ "\x6C\x2C\x1A\xBA\x4B\xBB\x49\x56"
+ "\xE1\x86\xA2\x0E\xD0\xF0\x07\xD3"
+ "\x18\x38\x09\x9C\x0E\x8B\x86\x07"
+ "\x90\x12\x37\x49\x27\x98\x69\x18"
+ "\xB0\xCC\xFB\xD3\xBD\x04\xA0\x85"
+ "\x4B\x22\x97\x07\xB6\x97\xE9\x95"
+ "\x0F\x88\x36\xA9\x44\x00\xC6\xE9"
+ "\x27\x53\x5C\x5B\x1F\xD3\xE2\xEE"
+ "\xD0\xCD\x63\x30\xA9\xC0\xDD\x49"
+ "\xFE\x16\xA4\x07\x0D\xE2\x5D\x97"
+ "\xDE\x89\xBA\x2E\xF3\xA9\x5E\xBE"
+ "\x03\x55\x0E\x02\x41\x4A\x45\x06"
+ "\xBE\xEA\x32\xF2\xDC\x91\x5C\x20"
+ "\x94\x02\x30\xD2\xFC\x29\xFA\x8E"
+ "\x34\xA0\x31\xB8\x34\xBA\xAE\x54"
+ "\xB5\x88\x1F\xDC\x43\xDC\x22\x9F"
+ "\xDC\xCE\xD3\xFA\xA4\xA8\xBC\x8A"
+ "\xC7\x5A\x43\x21\xA5\xB1\xDB\xC3"
+ "\x84\x3B\xB4\x9B\xB5\xA7\xF1\x0A"
+ "\xB6\x37\x21\x19\x55\xC2\xBD\x99"
+ "\x49\x24\xBB\x7C\xB3\x8E\xEF\xD2"
+ "\x3A\xCF\xA0\x31\x28\x0E\x25\xA2"
+ "\x11\xB4\x18\x17\x1A\x65\x92\x56"
+ "\xE8\xE0\x52\x9C\x61\x18\x2A\xB1"
+ "\x1A\x01\x22\x45\x17\x62\x52\x6C"
+ "\x91\x44\xCF\x98\xC7\xC0\x79\x26"
+ "\x32\x66\x6F\x23\x7F\x94\x36\x88"
+ "\x3C\xC9\xD0\xB7\x45\x30\x31\x86"
+ "\x3D\xC6\xA3\x98\x62\x84\x1A\x8B"
+ "\x16\x88\xC7\xA3\xE9\x4F\xE0\x86"
+ "\xA4\x93\xA8\x34\x5A\xCA\xDF\xCA"
+ "\x46\x38\xD2\xF4\xE0\x2D\x1E\xC9"
+ "\x7C\xEF\x53\xB7\x60\x72\x41\xBF"
+ "\x29\x00\x87\x02\xAF\x44\x4C\xB7"
+ "\x8C\xF5\x3F\x19\xF4\x80\x45\xA7"
+ "\x15\x5F\xDB\xE9\xB1\x83\xD2\xE6"
+ "\x1D\x18\x66\x44\x5B\x8F\x14\xEB",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec cast5_cbc_dec_tv_template[] = {
+ { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A",
+ .klen = 16,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\x05\x28\xCE\x61\x90\x80\xE1\x78"
+ "\xB9\x2A\x97\x7C\xB0\x83\xD8\x1A"
+ "\xDE\x58\x7F\xD7\xFD\x72\xB8\xFB"
+ "\xDA\xF0\x6E\x77\x14\x47\x82\xBA"
+ "\x29\x0E\x25\x6E\xB4\x39\xD9\x7F"
+ "\x05\xA7\xA7\x3A\xC1\x5D\x9E\x39"
+ "\xA7\xFB\x0D\x05\x00\xF3\x58\x67"
+ "\x60\xEC\x73\x77\x46\x85\x9B\x6A"
+ "\x08\x3E\xBE\x59\xFB\xE4\x96\x34"
+ "\xB4\x05\x49\x1A\x97\x43\xAD\xA0"
+ "\xA9\x1E\x6E\x74\xF1\x94\xEC\xA8"
+ "\xB5\x8A\x20\xEA\x89\x6B\x19\xAA"
+ "\xA7\xF1\x33\x67\x90\x23\x0D\xEE"
+ "\x81\xD5\x78\x4F\xD3\x63\xEA\x46"
+ "\xB5\xB2\x6E\xBB\xCA\x76\x06\x10"
+ "\x96\x2A\x0A\xBA\xF9\x41\x5A\x1D"
+ "\x36\x7C\x56\x14\x54\x83\xFA\xA1"
+ "\x27\xDD\xBA\x8A\x90\x29\xD6\xA6"
+ "\xFA\x48\x3E\x1E\x23\x6E\x98\xA8"
+ "\xA7\xD9\x67\x92\x5C\x13\xB4\x71"
+ "\xA8\xAA\x89\x4A\xA4\xB3\x49\x7C"
+ "\x7D\x7F\xCE\x6F\x29\x2E\x7E\x37"
+ "\xC8\x52\x60\xD9\xE7\xCA\x60\x98"
+ "\xED\xCD\xE8\x60\x83\xAD\x34\x4D"
+ "\x96\x4A\x99\x2B\xB7\x14\x75\x66"
+ "\x6C\x2C\x1A\xBA\x4B\xBB\x49\x56"
+ "\xE1\x86\xA2\x0E\xD0\xF0\x07\xD3"
+ "\x18\x38\x09\x9C\x0E\x8B\x86\x07"
+ "\x90\x12\x37\x49\x27\x98\x69\x18"
+ "\xB0\xCC\xFB\xD3\xBD\x04\xA0\x85"
+ "\x4B\x22\x97\x07\xB6\x97\xE9\x95"
+ "\x0F\x88\x36\xA9\x44\x00\xC6\xE9"
+ "\x27\x53\x5C\x5B\x1F\xD3\xE2\xEE"
+ "\xD0\xCD\x63\x30\xA9\xC0\xDD\x49"
+ "\xFE\x16\xA4\x07\x0D\xE2\x5D\x97"
+ "\xDE\x89\xBA\x2E\xF3\xA9\x5E\xBE"
+ "\x03\x55\x0E\x02\x41\x4A\x45\x06"
+ "\xBE\xEA\x32\xF2\xDC\x91\x5C\x20"
+ "\x94\x02\x30\xD2\xFC\x29\xFA\x8E"
+ "\x34\xA0\x31\xB8\x34\xBA\xAE\x54"
+ "\xB5\x88\x1F\xDC\x43\xDC\x22\x9F"
+ "\xDC\xCE\xD3\xFA\xA4\xA8\xBC\x8A"
+ "\xC7\x5A\x43\x21\xA5\xB1\xDB\xC3"
+ "\x84\x3B\xB4\x9B\xB5\xA7\xF1\x0A"
+ "\xB6\x37\x21\x19\x55\xC2\xBD\x99"
+ "\x49\x24\xBB\x7C\xB3\x8E\xEF\xD2"
+ "\x3A\xCF\xA0\x31\x28\x0E\x25\xA2"
+ "\x11\xB4\x18\x17\x1A\x65\x92\x56"
+ "\xE8\xE0\x52\x9C\x61\x18\x2A\xB1"
+ "\x1A\x01\x22\x45\x17\x62\x52\x6C"
+ "\x91\x44\xCF\x98\xC7\xC0\x79\x26"
+ "\x32\x66\x6F\x23\x7F\x94\x36\x88"
+ "\x3C\xC9\xD0\xB7\x45\x30\x31\x86"
+ "\x3D\xC6\xA3\x98\x62\x84\x1A\x8B"
+ "\x16\x88\xC7\xA3\xE9\x4F\xE0\x86"
+ "\xA4\x93\xA8\x34\x5A\xCA\xDF\xCA"
+ "\x46\x38\xD2\xF4\xE0\x2D\x1E\xC9"
+ "\x7C\xEF\x53\xB7\x60\x72\x41\xBF"
+ "\x29\x00\x87\x02\xAF\x44\x4C\xB7"
+ "\x8C\xF5\x3F\x19\xF4\x80\x45\xA7"
+ "\x15\x5F\xDB\xE9\xB1\x83\xD2\xE6"
+ "\x1D\x18\x66\x44\x5B\x8F\x14\xEB",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec cast5_ctr_enc_tv_template[] = {
+ { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A",
+ .klen = 16,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A",
+ .ilen = 17,
+ .result = "\xFF\xC4\x2E\x82\x3D\xF8\xA8\x39"
+ "\x7C\x52\xC4\xD3\xBB\x62\xC6\xA8"
+ "\x0C",
+ .rlen = 17,
+ }, { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A",
+ .klen = 16,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\xFF\xC4\x2E\x82\x3D\xF8\xA8\x39"
+ "\x7C\x52\xC4\xD3\xBB\x62\xC6\xA8"
+ "\x0C\x63\xA5\x55\xE3\xF8\x1C\x7F"
+ "\xDC\x59\xF9\xA0\x52\xAD\x83\xDF"
+ "\xD5\x3B\x53\x4A\xAA\x1F\x49\x44"
+ "\xE8\x20\xCC\xF8\x97\xE6\xE0\x3C"
+ "\x5A\xD2\x83\xEC\xEE\x25\x3F\xCF"
+ "\x0D\xC2\x79\x80\x99\x6E\xFF\x7B"
+ "\x64\xB0\x7B\x86\x29\x1D\x9F\x17"
+ "\x10\xA5\xA5\xEB\x16\x55\x9E\xE3"
+ "\x88\x18\x52\x56\x48\x58\xD1\x6B"
+ "\xE8\x74\x6E\x48\xB0\x2E\x69\x63"
+ "\x32\xAA\xAC\x26\x55\x45\x94\xDE"
+ "\x30\x26\x26\xE6\x08\x82\x2F\x5F"
+ "\xA7\x15\x94\x07\x75\x2D\xC6\x3A"
+ "\x1B\xA0\x39\xFB\xBA\xB9\x06\x56"
+ "\xF6\x9F\xF1\x2F\x9B\xF3\x89\x8B"
+ "\x08\xC8\x9D\x5E\x6B\x95\x09\xC7"
+ "\x98\xB7\x62\xA4\x1D\x25\xFA\xC5"
+ "\x62\xC8\x5D\x6B\xB4\x85\x88\x7F"
+ "\x3B\x29\xF9\xB4\x32\x62\x69\xBF"
+ "\x32\xB8\xEB\xFD\x0E\x26\xAA\xA3"
+ "\x44\x67\x90\x20\xAC\x41\xDF\x43"
+ "\xC6\xC7\x19\x9F\x2C\x28\x74\xEB"
+ "\x3E\x7F\x7A\x80\x5B\xE4\x08\x60"
+ "\xC7\xC9\x71\x34\x44\xCE\x05\xFD"
+ "\xA8\x91\xA8\x44\x5E\xD3\x89\x2C"
+ "\xAE\x59\x0F\x07\x88\x79\x53\x26"
+ "\xAF\xAC\xCB\x1D\x6F\x08\x25\x62"
+ "\xD0\x82\x65\x66\xE4\x2A\x29\x1C"
+ "\x9C\x64\x5F\x49\x9D\xF8\x62\xF9"
+ "\xED\xC4\x13\x52\x75\xDC\xE4\xF9"
+ "\x68\x0F\x8A\xCD\xA6\x8D\x75\xAA"
+ "\x49\xA1\x86\x86\x37\x5C\x6B\x3D"
+ "\x56\xE5\x6F\xBE\x27\xC0\x10\xF8"
+ "\x3C\x4D\x17\x35\x14\xDC\x1C\xA0"
+ "\x6E\xAE\xD1\x10\xDD\x83\x06\xC2"
+ "\x23\xD3\xC7\x27\x15\x04\x2C\x27"
+ "\xDD\x1F\x2E\x97\x09\x9C\x33\x7D"
+ "\xAC\x50\x1B\x2E\xC9\x52\x0C\x14"
+ "\x4B\x78\xC4\xDE\x07\x6A\x12\x02"
+ "\x6E\xD7\x4B\x91\xB9\x88\x4D\x02"
+ "\xC3\xB5\x04\xBC\xE0\x67\xCA\x18"
+ "\x22\xA1\xAE\x9A\x21\xEF\xB2\x06"
+ "\x35\xCD\xEC\x37\x70\x2D\xFC\x1E"
+ "\xA8\x31\xE7\xFC\xE5\x8E\x88\x66"
+ "\x16\xB5\xC8\x45\x21\x37\xBD\x24"
+ "\xA9\xD5\x36\x12\x9F\x6E\x67\x80"
+ "\x87\x54\xD5\xAF\x97\xE1\x15\xA7"
+ "\x11\xF0\x63\x7B\xE1\x44\x14\x1C"
+ "\x06\x32\x05\x8C\x6C\xDB\x9B\x36"
+ "\x6A\x6B\xAD\x3A\x27\x55\x20\x4C"
+ "\x76\x36\x43\xE8\x16\x60\xB5\xF3"
+ "\xDF\x5A\xC6\xA5\x69\x78\x59\x51"
+ "\x54\x68\x65\x06\x84\xDE\x3D\xAE"
+ "\x38\x91\xBD\xCC\xA2\x8A\xEC\xE6"
+ "\x9E\x83\xAE\x1E\x8E\x34\x5D\xDE"
+ "\x91\xCE\x8F\xED\x40\xF7\xC8\x8B"
+ "\x9A\x13\x4C\xAD\x89\x97\x9E\xD1"
+ "\x91\x01\xD7\x21\x23\x28\x1E\xCC"
+ "\x8C\x98\xDB\xDE\xFC\x72\x94\xAA"
+ "\xC0\x0D\x96\xAA\x23\xF8\xFE\x13",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec cast5_ctr_dec_tv_template[] = {
+ { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A",
+ .klen = 16,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\xFF\xC4\x2E\x82\x3D\xF8\xA8\x39"
+ "\x7C\x52\xC4\xD3\xBB\x62\xC6\xA8"
+ "\x0C",
+ .ilen = 17,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A",
+ .rlen = 17,
+ }, { /* Generated from TF test vectors */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A",
+ .klen = 16,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\xFF\xC4\x2E\x82\x3D\xF8\xA8\x39"
+ "\x7C\x52\xC4\xD3\xBB\x62\xC6\xA8"
+ "\x0C\x63\xA5\x55\xE3\xF8\x1C\x7F"
+ "\xDC\x59\xF9\xA0\x52\xAD\x83\xDF"
+ "\xD5\x3B\x53\x4A\xAA\x1F\x49\x44"
+ "\xE8\x20\xCC\xF8\x97\xE6\xE0\x3C"
+ "\x5A\xD2\x83\xEC\xEE\x25\x3F\xCF"
+ "\x0D\xC2\x79\x80\x99\x6E\xFF\x7B"
+ "\x64\xB0\x7B\x86\x29\x1D\x9F\x17"
+ "\x10\xA5\xA5\xEB\x16\x55\x9E\xE3"
+ "\x88\x18\x52\x56\x48\x58\xD1\x6B"
+ "\xE8\x74\x6E\x48\xB0\x2E\x69\x63"
+ "\x32\xAA\xAC\x26\x55\x45\x94\xDE"
+ "\x30\x26\x26\xE6\x08\x82\x2F\x5F"
+ "\xA7\x15\x94\x07\x75\x2D\xC6\x3A"
+ "\x1B\xA0\x39\xFB\xBA\xB9\x06\x56"
+ "\xF6\x9F\xF1\x2F\x9B\xF3\x89\x8B"
+ "\x08\xC8\x9D\x5E\x6B\x95\x09\xC7"
+ "\x98\xB7\x62\xA4\x1D\x25\xFA\xC5"
+ "\x62\xC8\x5D\x6B\xB4\x85\x88\x7F"
+ "\x3B\x29\xF9\xB4\x32\x62\x69\xBF"
+ "\x32\xB8\xEB\xFD\x0E\x26\xAA\xA3"
+ "\x44\x67\x90\x20\xAC\x41\xDF\x43"
+ "\xC6\xC7\x19\x9F\x2C\x28\x74\xEB"
+ "\x3E\x7F\x7A\x80\x5B\xE4\x08\x60"
+ "\xC7\xC9\x71\x34\x44\xCE\x05\xFD"
+ "\xA8\x91\xA8\x44\x5E\xD3\x89\x2C"
+ "\xAE\x59\x0F\x07\x88\x79\x53\x26"
+ "\xAF\xAC\xCB\x1D\x6F\x08\x25\x62"
+ "\xD0\x82\x65\x66\xE4\x2A\x29\x1C"
+ "\x9C\x64\x5F\x49\x9D\xF8\x62\xF9"
+ "\xED\xC4\x13\x52\x75\xDC\xE4\xF9"
+ "\x68\x0F\x8A\xCD\xA6\x8D\x75\xAA"
+ "\x49\xA1\x86\x86\x37\x5C\x6B\x3D"
+ "\x56\xE5\x6F\xBE\x27\xC0\x10\xF8"
+ "\x3C\x4D\x17\x35\x14\xDC\x1C\xA0"
+ "\x6E\xAE\xD1\x10\xDD\x83\x06\xC2"
+ "\x23\xD3\xC7\x27\x15\x04\x2C\x27"
+ "\xDD\x1F\x2E\x97\x09\x9C\x33\x7D"
+ "\xAC\x50\x1B\x2E\xC9\x52\x0C\x14"
+ "\x4B\x78\xC4\xDE\x07\x6A\x12\x02"
+ "\x6E\xD7\x4B\x91\xB9\x88\x4D\x02"
+ "\xC3\xB5\x04\xBC\xE0\x67\xCA\x18"
+ "\x22\xA1\xAE\x9A\x21\xEF\xB2\x06"
+ "\x35\xCD\xEC\x37\x70\x2D\xFC\x1E"
+ "\xA8\x31\xE7\xFC\xE5\x8E\x88\x66"
+ "\x16\xB5\xC8\x45\x21\x37\xBD\x24"
+ "\xA9\xD5\x36\x12\x9F\x6E\x67\x80"
+ "\x87\x54\xD5\xAF\x97\xE1\x15\xA7"
+ "\x11\xF0\x63\x7B\xE1\x44\x14\x1C"
+ "\x06\x32\x05\x8C\x6C\xDB\x9B\x36"
+ "\x6A\x6B\xAD\x3A\x27\x55\x20\x4C"
+ "\x76\x36\x43\xE8\x16\x60\xB5\xF3"
+ "\xDF\x5A\xC6\xA5\x69\x78\x59\x51"
+ "\x54\x68\x65\x06\x84\xDE\x3D\xAE"
+ "\x38\x91\xBD\xCC\xA2\x8A\xEC\xE6"
+ "\x9E\x83\xAE\x1E\x8E\x34\x5D\xDE"
+ "\x91\xCE\x8F\xED\x40\xF7\xC8\x8B"
+ "\x9A\x13\x4C\xAD\x89\x97\x9E\xD1"
+ "\x91\x01\xD7\x21\x23\x28\x1E\xCC"
+ "\x8C\x98\xDB\xDE\xFC\x72\x94\xAA"
+ "\xC0\x0D\x96\xAA\x23\xF8\xFE\x13",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 496 - 16, 16 },
},
};
@@ -6726,10 +22499,16 @@ static struct cipher_testvec fcrypt_pcbc_dec_tv_template[] = {
/*
* CAMELLIA test vectors.
*/
-#define CAMELLIA_ENC_TEST_VECTORS 3
-#define CAMELLIA_DEC_TEST_VECTORS 3
-#define CAMELLIA_CBC_ENC_TEST_VECTORS 2
-#define CAMELLIA_CBC_DEC_TEST_VECTORS 2
+#define CAMELLIA_ENC_TEST_VECTORS 4
+#define CAMELLIA_DEC_TEST_VECTORS 4
+#define CAMELLIA_CBC_ENC_TEST_VECTORS 3
+#define CAMELLIA_CBC_DEC_TEST_VECTORS 3
+#define CAMELLIA_CTR_ENC_TEST_VECTORS 2
+#define CAMELLIA_CTR_DEC_TEST_VECTORS 2
+#define CAMELLIA_LRW_ENC_TEST_VECTORS 8
+#define CAMELLIA_LRW_DEC_TEST_VECTORS 8
+#define CAMELLIA_XTS_ENC_TEST_VECTORS 5
+#define CAMELLIA_XTS_DEC_TEST_VECTORS 5
static struct cipher_testvec camellia_enc_tv_template[] = {
{
@@ -6765,6 +22544,269 @@ static struct cipher_testvec camellia_enc_tv_template[] = {
.result = "\x9a\xcc\x23\x7d\xff\x16\xd7\x6c"
"\x20\xef\x7c\x91\x9e\x3a\x75\x09",
.rlen = 16,
+ }, { /* Generated with Crypto++ */
+ .key = "\x3F\x85\x62\x3F\x1C\xF9\xD6\x1C"
+ "\xF9\xD6\xB3\x90\x6D\x4A\x90\x6D"
+ "\x4A\x27\x04\xE1\x27\x04\xE1\xBE"
+ "\x9B\x78\xBE\x9B\x78\x55\x32\x0F",
+ .klen = 32,
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06"
+ "\x9D\x34\xCB\x3F\xD6\x6D\x04\x78"
+ "\x0F\xA6\x1A\xB1\x48\xDF\x53\xEA"
+ "\x81\x18\x8C\x23\xBA\x2E\xC5\x5C"
+ "\xF3\x67\xFE\x95\x09\xA0\x37\xCE"
+ "\x42\xD9\x70\x07\x7B\x12\xA9\x1D"
+ "\xB4\x4B\xE2\x56\xED\x84\x1B\x8F"
+ "\x26\xBD\x31\xC8\x5F\xF6\x6A\x01"
+ "\x98\x0C\xA3\x3A\xD1\x45\xDC\x73"
+ "\x0A\x7E\x15\xAC\x20\xB7\x4E\xE5"
+ "\x59\xF0\x87\x1E\x92\x29\xC0\x34"
+ "\xCB\x62\xF9\x6D\x04\x9B\x0F\xA6"
+ "\x3D\xD4\x48\xDF\x76\x0D\x81\x18"
+ "\xAF\x23\xBA\x51\xE8\x5C\xF3\x8A"
+ "\x21\x95\x2C\xC3\x37\xCE\x65\xFC"
+ "\x70\x07\x9E\x12\xA9\x40\xD7\x4B"
+ "\xE2\x79\x10\x84\x1B\xB2\x26\xBD"
+ "\x54\xEB\x5F\xF6\x8D\x01\x98\x2F"
+ "\xC6\x3A\xD1\x68\xFF\x73\x0A\xA1"
+ "\x15\xAC\x43\xDA\x4E\xE5\x7C\x13"
+ "\x87\x1E\xB5\x29\xC0\x57\xEE\x62"
+ "\xF9\x90\x04\x9B\x32\xC9\x3D\xD4"
+ "\x6B\x02\x76\x0D\xA4\x18\xAF\x46"
+ "\xDD\x51\xE8\x7F\x16\x8A\x21\xB8"
+ "\x2C\xC3\x5A\xF1\x65\xFC\x93\x07"
+ "\x9E\x35\xCC\x40\xD7\x6E\x05\x79"
+ "\x10\xA7\x1B\xB2\x49\xE0\x54\xEB"
+ "\x82\x19\x8D\x24\xBB\x2F\xC6\x5D"
+ "\xF4\x68\xFF\x96\x0A\xA1\x38\xCF"
+ "\x43\xDA\x71\x08\x7C\x13\xAA\x1E"
+ "\xB5\x4C\xE3\x57\xEE\x85\x1C\x90"
+ "\x27\xBE\x32\xC9\x60\xF7\x6B\x02"
+ "\x99\x0D\xA4\x3B\xD2\x46\xDD\x74"
+ "\x0B\x7F\x16\xAD\x21\xB8\x4F\xE6"
+ "\x5A\xF1\x88\x1F\x93\x2A\xC1\x35"
+ "\xCC\x63\xFA\x6E\x05\x9C\x10\xA7"
+ "\x3E\xD5\x49\xE0\x77\x0E\x82\x19"
+ "\xB0\x24\xBB\x52\xE9\x5D\xF4\x8B"
+ "\x22\x96\x2D\xC4\x38\xCF\x66\xFD"
+ "\x71\x08\x9F\x13\xAA\x41\xD8\x4C"
+ "\xE3\x7A\x11\x85\x1C\xB3\x27\xBE"
+ "\x55\xEC\x60\xF7\x8E\x02\x99\x30"
+ "\xC7\x3B\xD2\x69\x00\x74\x0B\xA2"
+ "\x16\xAD\x44\xDB\x4F\xE6\x7D\x14"
+ "\x88\x1F\xB6\x2A\xC1\x58\xEF\x63"
+ "\xFA\x91\x05\x9C\x33\xCA\x3E\xD5"
+ "\x6C\x03\x77\x0E\xA5\x19\xB0\x47"
+ "\xDE\x52\xE9\x80\x17\x8B\x22\xB9"
+ "\x2D\xC4\x5B\xF2\x66\xFD\x94\x08"
+ "\x9F\x36\xCD\x41\xD8\x6F\x06\x7A"
+ "\x11\xA8\x1C\xB3\x4A\xE1\x55\xEC"
+ "\x83\x1A\x8E\x25\xBC\x30\xC7\x5E"
+ "\xF5\x69\x00\x97\x0B\xA2\x39\xD0"
+ "\x44\xDB\x72\x09\x7D\x14\xAB\x1F"
+ "\xB6\x4D\xE4\x58\xEF\x86\x1D\x91"
+ "\x28\xBF\x33\xCA\x61\xF8\x6C\x03"
+ "\x9A\x0E\xA5\x3C\xD3\x47\xDE\x75"
+ "\x0C\x80\x17\xAE\x22\xB9\x50\xE7"
+ "\x5B\xF2\x89\x20\x94\x2B\xC2\x36"
+ "\xCD\x64\xFB\x6F\x06\x9D\x11\xA8"
+ "\x3F\xD6\x4A\xE1\x78\x0F\x83\x1A"
+ "\xB1\x25\xBC\x53\xEA\x5E\xF5\x8C"
+ "\x00\x97\x2E\xC5\x39\xD0\x67\xFE"
+ "\x72\x09\xA0\x14\xAB\x42\xD9\x4D",
+ .ilen = 1008,
+ .result = "\xED\xCD\xDB\xB8\x68\xCE\xBD\xEA"
+ "\x9D\x9D\xCD\x9F\x4F\xFC\x4D\xB7"
+ "\xA5\xFF\x6F\x43\x0F\xBA\x32\x04"
+ "\xB3\xC2\xB9\x03\xAA\x91\x56\x29"
+ "\x0D\xD0\xFD\xC4\x65\xA5\x69\xB9"
+ "\xF1\xF6\xB1\xA5\xB2\x75\x4F\x8A"
+ "\x8D\x7D\x1B\x9B\xC7\x68\x72\xF8"
+ "\x01\x9B\x17\x0A\x29\xE7\x61\x28"
+ "\x7F\xA7\x50\xCA\x20\x2C\x96\x3B"
+ "\x6E\x5C\x5D\x3F\xB5\x7F\xF3\x2B"
+ "\x04\xEF\x9D\xD4\xCE\x41\x28\x8E"
+ "\x83\x54\xAE\x7C\x82\x46\x10\xC9"
+ "\xC4\x8A\x1E\x1F\x4C\xA9\xFC\xEC"
+ "\x3C\x8C\x30\xFC\x59\xD2\x54\xC4"
+ "\x6F\x50\xC6\xCA\x8C\x14\x5B\x9C"
+ "\x18\x56\x5B\xF8\x33\x0E\x4A\xDB"
+ "\xEC\xB5\x6E\x5B\x31\xC4\x0E\x98"
+ "\x9F\x32\xBA\xA2\x18\xCF\x55\x43"
+ "\xFE\x80\x8F\x60\xCF\x05\x30\x9B"
+ "\x70\x50\x1E\x9C\x08\x87\xE6\x20"
+ "\xD2\xF3\x27\xF8\x2A\x8D\x12\xB2"
+ "\xBC\x5F\xFE\x52\x52\xF6\x7F\xB6"
+ "\xB8\x30\x86\x3B\x0F\x94\x1E\x79"
+ "\x13\x94\x35\xA2\xB1\x35\x5B\x05"
+ "\x2A\x98\x6B\x96\x4C\xB1\x20\xBE"
+ "\xB6\x14\xC2\x06\xBF\xFD\x5F\x2A"
+ "\xF5\x33\xC8\x19\x45\x14\x44\x5D"
+ "\xFE\x94\x7B\xBB\x63\x13\x57\xC3"
+ "\x2A\x8F\x6C\x11\x2A\x07\xA7\x6A"
+ "\xBF\x20\xD3\x99\xC6\x00\x0B\xBF"
+ "\x83\x46\x25\x3A\xB0\xF6\xC5\xC8"
+ "\x00\xCA\xE5\x28\x4A\x7C\x95\x9C"
+ "\x7B\x43\xAB\xF9\xE4\xF8\x74\xAB"
+ "\xA7\xB8\x9C\x0F\x53\x7B\xB6\x74"
+ "\x60\x64\x0D\x1C\x80\xD1\x20\x9E"
+ "\xDC\x14\x27\x9B\xFC\xBD\x5C\x96"
+ "\xD2\x51\xDC\x96\xEE\xE5\xEA\x2B"
+ "\x02\x7C\xAA\x3C\xDC\x9D\x7B\x01"
+ "\x20\xC3\xE1\x0B\xDD\xAB\xF3\x1E"
+ "\x19\xA8\x84\x29\x5F\xCC\xC3\x5B"
+ "\xE4\x33\x59\xDC\x12\xEB\x2B\x4D"
+ "\x5B\x55\x23\xB7\x40\x31\xDE\xEE"
+ "\x18\xC9\x3C\x4D\xBC\xED\xE0\x42"
+ "\xAD\xDE\xA0\xA3\xC3\xFE\x44\xD3"
+ "\xE1\x9A\xDA\xAB\x32\xFC\x1A\xBF"
+ "\x63\xA9\xF0\x6A\x08\x46\xBD\x48"
+ "\x83\x06\xAB\x82\x99\x01\x16\x1A"
+ "\x03\x36\xC5\x59\x6B\xB8\x8C\x9F"
+ "\xC6\x51\x3D\xE5\x7F\xBF\xAB\xBC"
+ "\xC9\xA1\x88\x34\x5F\xA9\x7C\x3B"
+ "\x9F\x1B\x98\x2B\x4F\xFB\x9B\xF0"
+ "\xCD\xB6\x45\xB2\x29\x2E\x34\x23"
+ "\xA9\x97\xC0\x22\x8C\x42\x9B\x5F"
+ "\x40\xC8\xD7\x3D\x82\x9A\x6F\xAA"
+ "\x74\x83\x29\x05\xE8\xC4\x4D\x01"
+ "\xB5\xE5\x84\x3F\x7F\xD3\xE0\x99"
+ "\xDA\xE7\x6F\x30\xFD\xAA\x92\x30"
+ "\xA5\x46\x8B\xA2\xE6\x58\x62\x7C"
+ "\x2C\x35\x1B\x38\x85\x7D\xE8\xF3"
+ "\x87\x4F\xDA\xD8\x5F\xFC\xB6\x44"
+ "\xD0\xE3\x9B\x8B\xBF\xD6\xB8\xC4"
+ "\x73\xAE\x1D\x8B\x5B\x74\x8B\xCB"
+ "\xA4\xAD\xCF\x5D\xD4\x58\xC9\xCD"
+ "\xF7\x90\x68\xCF\xC9\x11\x52\x3E"
+ "\xE8\xA1\xA3\x78\x8B\xD0\xAC\x0A"
+ "\xD4\xC9\xA3\xA5\x55\x30\xC8\x3E"
+ "\xED\x28\x39\xE9\x63\xED\x41\x70"
+ "\x51\xE3\xC4\xA0\xFC\xD5\x43\xCB"
+ "\x4D\x65\xC8\xFD\x3A\x91\x8F\x60"
+ "\x8A\xA6\x6D\x9D\x3E\x01\x23\x4B"
+ "\x50\x47\xC9\xDC\x9B\xDE\x37\xC5"
+ "\xBF\x67\xB1\x6B\x78\x38\xD5\x7E"
+ "\xB6\xFF\x67\x83\x3B\x6E\xBE\x23"
+ "\x45\xFA\x1D\x69\x44\xFD\xC6\xB9"
+ "\xD0\x4A\x92\xD1\xBE\xF6\x4A\xB7"
+ "\xCA\xA8\xA2\x9E\x13\x87\x57\x92"
+ "\x64\x7C\x85\x0B\xB3\x29\x37\xD8"
+ "\xE6\xAA\xAF\xC4\x03\x67\xA3\xBF"
+ "\x2E\x45\x83\xB6\xD8\x54\x00\x89"
+ "\xF6\xBC\x3A\x7A\x88\x58\x51\xED"
+ "\xF4\x4E\x01\xA5\xC3\x2E\xD9\x42"
+ "\xBD\x6E\x0D\x0B\x21\xB0\x1A\xCC"
+ "\xA4\xD3\x3F\xDC\x9B\x81\xD8\xF1"
+ "\xEA\x7A\x6A\xB7\x07\xC9\x6D\x91"
+ "\x6D\x3A\xF5\x5F\xA6\xFF\x87\x1E"
+ "\x3F\xDD\xC0\x72\xEA\xAC\x08\x15"
+ "\x21\xE6\xC6\xB6\x0D\xD8\x51\x86"
+ "\x2A\x03\x73\xF7\x29\xD4\xC4\xE4"
+ "\x7F\x95\x10\xF7\xAB\x3F\x92\x23"
+ "\xD3\xCE\x9C\x2E\x46\x3B\x63\x43"
+ "\xBB\xC2\x82\x7A\x83\xD5\x55\xE2"
+ "\xE7\x9B\x2F\x92\xAF\xFD\x81\x56"
+ "\x79\xFD\x3E\xF9\x46\xE0\x25\xD4"
+ "\x38\xDE\xBC\x2C\xC4\x7A\x2A\x8F"
+ "\x94\x4F\xD0\xAD\x9B\x37\x18\xD4"
+ "\x0E\x4D\x0F\x02\x3A\xDC\x5A\xA2"
+ "\x39\x25\x55\x20\x5A\xA6\x02\x9F"
+ "\xE6\x77\x21\x77\xE5\x4B\x7B\x0B"
+ "\x30\xF8\x5F\x33\x0F\x49\xCD\xFF"
+ "\xF2\xE4\x35\xF9\xF0\x63\xC3\x7E"
+ "\xF1\xA6\x73\xB4\xDF\xE7\xBB\x78"
+ "\xFF\x21\xA9\xF3\xF3\xCF\x5D\xBA"
+ "\xED\x87\x98\xAC\xFE\x48\x97\x6D"
+ "\xA6\x7F\x69\x31\xB1\xC4\xFF\x14"
+ "\xC6\x76\xD4\x10\xDD\xF6\x49\x2C"
+ "\x9C\xC8\x6D\x76\xC0\x8F\x5F\x55"
+ "\x2F\x3C\x8A\x30\xAA\xC3\x16\x55"
+ "\xC6\xFC\x8D\x8B\xB9\xE5\x80\x6C"
+ "\xC8\x7E\xBD\x65\x58\x36\xD5\xBC"
+ "\xF0\x33\x52\x29\x70\xF9\x5C\xE9"
+ "\xAC\x1F\xB5\x73\x56\x66\x54\xAF"
+ "\x1B\x8F\x7D\xED\xAB\x03\xCE\xE3"
+ "\xAE\x47\xB6\x69\x86\xE9\x01\x31"
+ "\x83\x18\x3D\xF4\x74\x7B\xF9\x42"
+ "\x4C\xFD\x75\x4A\x6D\xF0\x03\xA6"
+ "\x2B\x20\x63\xDA\x49\x65\x5E\x8B"
+ "\xC0\x19\xE3\x8D\xD9\xF3\xB0\x34"
+ "\xD3\x52\xFC\x68\x00\x43\x1B\x37"
+ "\x31\x93\x51\x1C\x63\x97\x70\xB0"
+ "\x99\x78\x83\x13\xFD\xCF\x53\x81"
+ "\x36\x46\xB5\x42\x52\x2F\x32\xEB"
+ "\x4A\x3D\xF1\x8F\x1C\x54\x2E\xFC"
+ "\x41\x75\x5A\x8C\x8E\x6F\xE7\x1A"
+ "\xAE\xEF\x3E\x82\x12\x0B\x74\x72"
+ "\xF8\xB2\xAA\x7A\xD6\xFF\xFA\x55"
+ "\x33\x1A\xBB\xD3\xA2\x7E\x97\x66",
+ .rlen = 1008,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 1008 - 16, 16 },
},
};
@@ -6802,6 +22844,269 @@ static struct cipher_testvec camellia_dec_tv_template[] = {
.result = "\x01\x23\x45\x67\x89\xab\xcd\xef"
"\xfe\xdc\xba\x98\x76\x54\x32\x10",
.rlen = 16,
+ }, { /* Generated with Crypto++ */
+ .key = "\x3F\x85\x62\x3F\x1C\xF9\xD6\x1C"
+ "\xF9\xD6\xB3\x90\x6D\x4A\x90\x6D"
+ "\x4A\x27\x04\xE1\x27\x04\xE1\xBE"
+ "\x9B\x78\xBE\x9B\x78\x55\x32\x0F",
+ .klen = 32,
+ .input = "\xED\xCD\xDB\xB8\x68\xCE\xBD\xEA"
+ "\x9D\x9D\xCD\x9F\x4F\xFC\x4D\xB7"
+ "\xA5\xFF\x6F\x43\x0F\xBA\x32\x04"
+ "\xB3\xC2\xB9\x03\xAA\x91\x56\x29"
+ "\x0D\xD0\xFD\xC4\x65\xA5\x69\xB9"
+ "\xF1\xF6\xB1\xA5\xB2\x75\x4F\x8A"
+ "\x8D\x7D\x1B\x9B\xC7\x68\x72\xF8"
+ "\x01\x9B\x17\x0A\x29\xE7\x61\x28"
+ "\x7F\xA7\x50\xCA\x20\x2C\x96\x3B"
+ "\x6E\x5C\x5D\x3F\xB5\x7F\xF3\x2B"
+ "\x04\xEF\x9D\xD4\xCE\x41\x28\x8E"
+ "\x83\x54\xAE\x7C\x82\x46\x10\xC9"
+ "\xC4\x8A\x1E\x1F\x4C\xA9\xFC\xEC"
+ "\x3C\x8C\x30\xFC\x59\xD2\x54\xC4"
+ "\x6F\x50\xC6\xCA\x8C\x14\x5B\x9C"
+ "\x18\x56\x5B\xF8\x33\x0E\x4A\xDB"
+ "\xEC\xB5\x6E\x5B\x31\xC4\x0E\x98"
+ "\x9F\x32\xBA\xA2\x18\xCF\x55\x43"
+ "\xFE\x80\x8F\x60\xCF\x05\x30\x9B"
+ "\x70\x50\x1E\x9C\x08\x87\xE6\x20"
+ "\xD2\xF3\x27\xF8\x2A\x8D\x12\xB2"
+ "\xBC\x5F\xFE\x52\x52\xF6\x7F\xB6"
+ "\xB8\x30\x86\x3B\x0F\x94\x1E\x79"
+ "\x13\x94\x35\xA2\xB1\x35\x5B\x05"
+ "\x2A\x98\x6B\x96\x4C\xB1\x20\xBE"
+ "\xB6\x14\xC2\x06\xBF\xFD\x5F\x2A"
+ "\xF5\x33\xC8\x19\x45\x14\x44\x5D"
+ "\xFE\x94\x7B\xBB\x63\x13\x57\xC3"
+ "\x2A\x8F\x6C\x11\x2A\x07\xA7\x6A"
+ "\xBF\x20\xD3\x99\xC6\x00\x0B\xBF"
+ "\x83\x46\x25\x3A\xB0\xF6\xC5\xC8"
+ "\x00\xCA\xE5\x28\x4A\x7C\x95\x9C"
+ "\x7B\x43\xAB\xF9\xE4\xF8\x74\xAB"
+ "\xA7\xB8\x9C\x0F\x53\x7B\xB6\x74"
+ "\x60\x64\x0D\x1C\x80\xD1\x20\x9E"
+ "\xDC\x14\x27\x9B\xFC\xBD\x5C\x96"
+ "\xD2\x51\xDC\x96\xEE\xE5\xEA\x2B"
+ "\x02\x7C\xAA\x3C\xDC\x9D\x7B\x01"
+ "\x20\xC3\xE1\x0B\xDD\xAB\xF3\x1E"
+ "\x19\xA8\x84\x29\x5F\xCC\xC3\x5B"
+ "\xE4\x33\x59\xDC\x12\xEB\x2B\x4D"
+ "\x5B\x55\x23\xB7\x40\x31\xDE\xEE"
+ "\x18\xC9\x3C\x4D\xBC\xED\xE0\x42"
+ "\xAD\xDE\xA0\xA3\xC3\xFE\x44\xD3"
+ "\xE1\x9A\xDA\xAB\x32\xFC\x1A\xBF"
+ "\x63\xA9\xF0\x6A\x08\x46\xBD\x48"
+ "\x83\x06\xAB\x82\x99\x01\x16\x1A"
+ "\x03\x36\xC5\x59\x6B\xB8\x8C\x9F"
+ "\xC6\x51\x3D\xE5\x7F\xBF\xAB\xBC"
+ "\xC9\xA1\x88\x34\x5F\xA9\x7C\x3B"
+ "\x9F\x1B\x98\x2B\x4F\xFB\x9B\xF0"
+ "\xCD\xB6\x45\xB2\x29\x2E\x34\x23"
+ "\xA9\x97\xC0\x22\x8C\x42\x9B\x5F"
+ "\x40\xC8\xD7\x3D\x82\x9A\x6F\xAA"
+ "\x74\x83\x29\x05\xE8\xC4\x4D\x01"
+ "\xB5\xE5\x84\x3F\x7F\xD3\xE0\x99"
+ "\xDA\xE7\x6F\x30\xFD\xAA\x92\x30"
+ "\xA5\x46\x8B\xA2\xE6\x58\x62\x7C"
+ "\x2C\x35\x1B\x38\x85\x7D\xE8\xF3"
+ "\x87\x4F\xDA\xD8\x5F\xFC\xB6\x44"
+ "\xD0\xE3\x9B\x8B\xBF\xD6\xB8\xC4"
+ "\x73\xAE\x1D\x8B\x5B\x74\x8B\xCB"
+ "\xA4\xAD\xCF\x5D\xD4\x58\xC9\xCD"
+ "\xF7\x90\x68\xCF\xC9\x11\x52\x3E"
+ "\xE8\xA1\xA3\x78\x8B\xD0\xAC\x0A"
+ "\xD4\xC9\xA3\xA5\x55\x30\xC8\x3E"
+ "\xED\x28\x39\xE9\x63\xED\x41\x70"
+ "\x51\xE3\xC4\xA0\xFC\xD5\x43\xCB"
+ "\x4D\x65\xC8\xFD\x3A\x91\x8F\x60"
+ "\x8A\xA6\x6D\x9D\x3E\x01\x23\x4B"
+ "\x50\x47\xC9\xDC\x9B\xDE\x37\xC5"
+ "\xBF\x67\xB1\x6B\x78\x38\xD5\x7E"
+ "\xB6\xFF\x67\x83\x3B\x6E\xBE\x23"
+ "\x45\xFA\x1D\x69\x44\xFD\xC6\xB9"
+ "\xD0\x4A\x92\xD1\xBE\xF6\x4A\xB7"
+ "\xCA\xA8\xA2\x9E\x13\x87\x57\x92"
+ "\x64\x7C\x85\x0B\xB3\x29\x37\xD8"
+ "\xE6\xAA\xAF\xC4\x03\x67\xA3\xBF"
+ "\x2E\x45\x83\xB6\xD8\x54\x00\x89"
+ "\xF6\xBC\x3A\x7A\x88\x58\x51\xED"
+ "\xF4\x4E\x01\xA5\xC3\x2E\xD9\x42"
+ "\xBD\x6E\x0D\x0B\x21\xB0\x1A\xCC"
+ "\xA4\xD3\x3F\xDC\x9B\x81\xD8\xF1"
+ "\xEA\x7A\x6A\xB7\x07\xC9\x6D\x91"
+ "\x6D\x3A\xF5\x5F\xA6\xFF\x87\x1E"
+ "\x3F\xDD\xC0\x72\xEA\xAC\x08\x15"
+ "\x21\xE6\xC6\xB6\x0D\xD8\x51\x86"
+ "\x2A\x03\x73\xF7\x29\xD4\xC4\xE4"
+ "\x7F\x95\x10\xF7\xAB\x3F\x92\x23"
+ "\xD3\xCE\x9C\x2E\x46\x3B\x63\x43"
+ "\xBB\xC2\x82\x7A\x83\xD5\x55\xE2"
+ "\xE7\x9B\x2F\x92\xAF\xFD\x81\x56"
+ "\x79\xFD\x3E\xF9\x46\xE0\x25\xD4"
+ "\x38\xDE\xBC\x2C\xC4\x7A\x2A\x8F"
+ "\x94\x4F\xD0\xAD\x9B\x37\x18\xD4"
+ "\x0E\x4D\x0F\x02\x3A\xDC\x5A\xA2"
+ "\x39\x25\x55\x20\x5A\xA6\x02\x9F"
+ "\xE6\x77\x21\x77\xE5\x4B\x7B\x0B"
+ "\x30\xF8\x5F\x33\x0F\x49\xCD\xFF"
+ "\xF2\xE4\x35\xF9\xF0\x63\xC3\x7E"
+ "\xF1\xA6\x73\xB4\xDF\xE7\xBB\x78"
+ "\xFF\x21\xA9\xF3\xF3\xCF\x5D\xBA"
+ "\xED\x87\x98\xAC\xFE\x48\x97\x6D"
+ "\xA6\x7F\x69\x31\xB1\xC4\xFF\x14"
+ "\xC6\x76\xD4\x10\xDD\xF6\x49\x2C"
+ "\x9C\xC8\x6D\x76\xC0\x8F\x5F\x55"
+ "\x2F\x3C\x8A\x30\xAA\xC3\x16\x55"
+ "\xC6\xFC\x8D\x8B\xB9\xE5\x80\x6C"
+ "\xC8\x7E\xBD\x65\x58\x36\xD5\xBC"
+ "\xF0\x33\x52\x29\x70\xF9\x5C\xE9"
+ "\xAC\x1F\xB5\x73\x56\x66\x54\xAF"
+ "\x1B\x8F\x7D\xED\xAB\x03\xCE\xE3"
+ "\xAE\x47\xB6\x69\x86\xE9\x01\x31"
+ "\x83\x18\x3D\xF4\x74\x7B\xF9\x42"
+ "\x4C\xFD\x75\x4A\x6D\xF0\x03\xA6"
+ "\x2B\x20\x63\xDA\x49\x65\x5E\x8B"
+ "\xC0\x19\xE3\x8D\xD9\xF3\xB0\x34"
+ "\xD3\x52\xFC\x68\x00\x43\x1B\x37"
+ "\x31\x93\x51\x1C\x63\x97\x70\xB0"
+ "\x99\x78\x83\x13\xFD\xCF\x53\x81"
+ "\x36\x46\xB5\x42\x52\x2F\x32\xEB"
+ "\x4A\x3D\xF1\x8F\x1C\x54\x2E\xFC"
+ "\x41\x75\x5A\x8C\x8E\x6F\xE7\x1A"
+ "\xAE\xEF\x3E\x82\x12\x0B\x74\x72"
+ "\xF8\xB2\xAA\x7A\xD6\xFF\xFA\x55"
+ "\x33\x1A\xBB\xD3\xA2\x7E\x97\x66",
+ .ilen = 1008,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06"
+ "\x9D\x34\xCB\x3F\xD6\x6D\x04\x78"
+ "\x0F\xA6\x1A\xB1\x48\xDF\x53\xEA"
+ "\x81\x18\x8C\x23\xBA\x2E\xC5\x5C"
+ "\xF3\x67\xFE\x95\x09\xA0\x37\xCE"
+ "\x42\xD9\x70\x07\x7B\x12\xA9\x1D"
+ "\xB4\x4B\xE2\x56\xED\x84\x1B\x8F"
+ "\x26\xBD\x31\xC8\x5F\xF6\x6A\x01"
+ "\x98\x0C\xA3\x3A\xD1\x45\xDC\x73"
+ "\x0A\x7E\x15\xAC\x20\xB7\x4E\xE5"
+ "\x59\xF0\x87\x1E\x92\x29\xC0\x34"
+ "\xCB\x62\xF9\x6D\x04\x9B\x0F\xA6"
+ "\x3D\xD4\x48\xDF\x76\x0D\x81\x18"
+ "\xAF\x23\xBA\x51\xE8\x5C\xF3\x8A"
+ "\x21\x95\x2C\xC3\x37\xCE\x65\xFC"
+ "\x70\x07\x9E\x12\xA9\x40\xD7\x4B"
+ "\xE2\x79\x10\x84\x1B\xB2\x26\xBD"
+ "\x54\xEB\x5F\xF6\x8D\x01\x98\x2F"
+ "\xC6\x3A\xD1\x68\xFF\x73\x0A\xA1"
+ "\x15\xAC\x43\xDA\x4E\xE5\x7C\x13"
+ "\x87\x1E\xB5\x29\xC0\x57\xEE\x62"
+ "\xF9\x90\x04\x9B\x32\xC9\x3D\xD4"
+ "\x6B\x02\x76\x0D\xA4\x18\xAF\x46"
+ "\xDD\x51\xE8\x7F\x16\x8A\x21\xB8"
+ "\x2C\xC3\x5A\xF1\x65\xFC\x93\x07"
+ "\x9E\x35\xCC\x40\xD7\x6E\x05\x79"
+ "\x10\xA7\x1B\xB2\x49\xE0\x54\xEB"
+ "\x82\x19\x8D\x24\xBB\x2F\xC6\x5D"
+ "\xF4\x68\xFF\x96\x0A\xA1\x38\xCF"
+ "\x43\xDA\x71\x08\x7C\x13\xAA\x1E"
+ "\xB5\x4C\xE3\x57\xEE\x85\x1C\x90"
+ "\x27\xBE\x32\xC9\x60\xF7\x6B\x02"
+ "\x99\x0D\xA4\x3B\xD2\x46\xDD\x74"
+ "\x0B\x7F\x16\xAD\x21\xB8\x4F\xE6"
+ "\x5A\xF1\x88\x1F\x93\x2A\xC1\x35"
+ "\xCC\x63\xFA\x6E\x05\x9C\x10\xA7"
+ "\x3E\xD5\x49\xE0\x77\x0E\x82\x19"
+ "\xB0\x24\xBB\x52\xE9\x5D\xF4\x8B"
+ "\x22\x96\x2D\xC4\x38\xCF\x66\xFD"
+ "\x71\x08\x9F\x13\xAA\x41\xD8\x4C"
+ "\xE3\x7A\x11\x85\x1C\xB3\x27\xBE"
+ "\x55\xEC\x60\xF7\x8E\x02\x99\x30"
+ "\xC7\x3B\xD2\x69\x00\x74\x0B\xA2"
+ "\x16\xAD\x44\xDB\x4F\xE6\x7D\x14"
+ "\x88\x1F\xB6\x2A\xC1\x58\xEF\x63"
+ "\xFA\x91\x05\x9C\x33\xCA\x3E\xD5"
+ "\x6C\x03\x77\x0E\xA5\x19\xB0\x47"
+ "\xDE\x52\xE9\x80\x17\x8B\x22\xB9"
+ "\x2D\xC4\x5B\xF2\x66\xFD\x94\x08"
+ "\x9F\x36\xCD\x41\xD8\x6F\x06\x7A"
+ "\x11\xA8\x1C\xB3\x4A\xE1\x55\xEC"
+ "\x83\x1A\x8E\x25\xBC\x30\xC7\x5E"
+ "\xF5\x69\x00\x97\x0B\xA2\x39\xD0"
+ "\x44\xDB\x72\x09\x7D\x14\xAB\x1F"
+ "\xB6\x4D\xE4\x58\xEF\x86\x1D\x91"
+ "\x28\xBF\x33\xCA\x61\xF8\x6C\x03"
+ "\x9A\x0E\xA5\x3C\xD3\x47\xDE\x75"
+ "\x0C\x80\x17\xAE\x22\xB9\x50\xE7"
+ "\x5B\xF2\x89\x20\x94\x2B\xC2\x36"
+ "\xCD\x64\xFB\x6F\x06\x9D\x11\xA8"
+ "\x3F\xD6\x4A\xE1\x78\x0F\x83\x1A"
+ "\xB1\x25\xBC\x53\xEA\x5E\xF5\x8C"
+ "\x00\x97\x2E\xC5\x39\xD0\x67\xFE"
+ "\x72\x09\xA0\x14\xAB\x42\xD9\x4D",
+ .rlen = 1008,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 1008 - 16, 16 },
},
};
@@ -6833,6 +23138,271 @@ static struct cipher_testvec camellia_cbc_enc_tv_template[] = {
"\x19\xb4\x3e\x57\x1c\x02\x5e\xa0"
"\x15\x78\xe0\x5e\xf2\xcb\x87\x16",
.rlen = 32,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06"
+ "\x9D\x34\xCB\x3F\xD6\x6D\x04\x78"
+ "\x0F\xA6\x1A\xB1\x48\xDF\x53\xEA"
+ "\x81\x18\x8C\x23\xBA\x2E\xC5\x5C"
+ "\xF3\x67\xFE\x95\x09\xA0\x37\xCE"
+ "\x42\xD9\x70\x07\x7B\x12\xA9\x1D"
+ "\xB4\x4B\xE2\x56\xED\x84\x1B\x8F"
+ "\x26\xBD\x31\xC8\x5F\xF6\x6A\x01"
+ "\x98\x0C\xA3\x3A\xD1\x45\xDC\x73"
+ "\x0A\x7E\x15\xAC\x20\xB7\x4E\xE5"
+ "\x59\xF0\x87\x1E\x92\x29\xC0\x34"
+ "\xCB\x62\xF9\x6D\x04\x9B\x0F\xA6"
+ "\x3D\xD4\x48\xDF\x76\x0D\x81\x18"
+ "\xAF\x23\xBA\x51\xE8\x5C\xF3\x8A"
+ "\x21\x95\x2C\xC3\x37\xCE\x65\xFC"
+ "\x70\x07\x9E\x12\xA9\x40\xD7\x4B"
+ "\xE2\x79\x10\x84\x1B\xB2\x26\xBD"
+ "\x54\xEB\x5F\xF6\x8D\x01\x98\x2F"
+ "\xC6\x3A\xD1\x68\xFF\x73\x0A\xA1"
+ "\x15\xAC\x43\xDA\x4E\xE5\x7C\x13"
+ "\x87\x1E\xB5\x29\xC0\x57\xEE\x62"
+ "\xF9\x90\x04\x9B\x32\xC9\x3D\xD4"
+ "\x6B\x02\x76\x0D\xA4\x18\xAF\x46"
+ "\xDD\x51\xE8\x7F\x16\x8A\x21\xB8"
+ "\x2C\xC3\x5A\xF1\x65\xFC\x93\x07"
+ "\x9E\x35\xCC\x40\xD7\x6E\x05\x79"
+ "\x10\xA7\x1B\xB2\x49\xE0\x54\xEB"
+ "\x82\x19\x8D\x24\xBB\x2F\xC6\x5D"
+ "\xF4\x68\xFF\x96\x0A\xA1\x38\xCF"
+ "\x43\xDA\x71\x08\x7C\x13\xAA\x1E"
+ "\xB5\x4C\xE3\x57\xEE\x85\x1C\x90"
+ "\x27\xBE\x32\xC9\x60\xF7\x6B\x02"
+ "\x99\x0D\xA4\x3B\xD2\x46\xDD\x74"
+ "\x0B\x7F\x16\xAD\x21\xB8\x4F\xE6"
+ "\x5A\xF1\x88\x1F\x93\x2A\xC1\x35"
+ "\xCC\x63\xFA\x6E\x05\x9C\x10\xA7"
+ "\x3E\xD5\x49\xE0\x77\x0E\x82\x19"
+ "\xB0\x24\xBB\x52\xE9\x5D\xF4\x8B"
+ "\x22\x96\x2D\xC4\x38\xCF\x66\xFD"
+ "\x71\x08\x9F\x13\xAA\x41\xD8\x4C"
+ "\xE3\x7A\x11\x85\x1C\xB3\x27\xBE"
+ "\x55\xEC\x60\xF7\x8E\x02\x99\x30"
+ "\xC7\x3B\xD2\x69\x00\x74\x0B\xA2"
+ "\x16\xAD\x44\xDB\x4F\xE6\x7D\x14"
+ "\x88\x1F\xB6\x2A\xC1\x58\xEF\x63"
+ "\xFA\x91\x05\x9C\x33\xCA\x3E\xD5"
+ "\x6C\x03\x77\x0E\xA5\x19\xB0\x47"
+ "\xDE\x52\xE9\x80\x17\x8B\x22\xB9"
+ "\x2D\xC4\x5B\xF2\x66\xFD\x94\x08"
+ "\x9F\x36\xCD\x41\xD8\x6F\x06\x7A"
+ "\x11\xA8\x1C\xB3\x4A\xE1\x55\xEC"
+ "\x83\x1A\x8E\x25\xBC\x30\xC7\x5E"
+ "\xF5\x69\x00\x97\x0B\xA2\x39\xD0"
+ "\x44\xDB\x72\x09\x7D\x14\xAB\x1F"
+ "\xB6\x4D\xE4\x58\xEF\x86\x1D\x91"
+ "\x28\xBF\x33\xCA\x61\xF8\x6C\x03"
+ "\x9A\x0E\xA5\x3C\xD3\x47\xDE\x75"
+ "\x0C\x80\x17\xAE\x22\xB9\x50\xE7"
+ "\x5B\xF2\x89\x20\x94\x2B\xC2\x36"
+ "\xCD\x64\xFB\x6F\x06\x9D\x11\xA8"
+ "\x3F\xD6\x4A\xE1\x78\x0F\x83\x1A"
+ "\xB1\x25\xBC\x53\xEA\x5E\xF5\x8C"
+ "\x00\x97\x2E\xC5\x39\xD0\x67\xFE"
+ "\x72\x09\xA0\x14\xAB\x42\xD9\x4D",
+ .ilen = 1008,
+ .result = "\xCD\x3E\x2A\x3B\x3E\x94\xC5\x77"
+ "\xBA\xBB\x5B\xB1\xDE\x7B\xA4\x40"
+ "\x88\x39\xE3\xFD\x94\x4B\x25\x58"
+ "\xE1\x4B\xC4\x18\x7A\xFD\x17\x2B"
+ "\xB9\xF9\xC2\x27\x6A\xB6\x31\x27"
+ "\xA6\xAD\xEF\xE5\x5D\xE4\x02\x01"
+ "\x56\x2E\x10\xC2\x2C\xFF\xC6\x83"
+ "\xB5\xDC\x4F\x63\xAD\x0E\x63\x5E"
+ "\x56\xC8\x18\x3D\x79\x86\x97\xEF"
+ "\x57\x0E\x63\xA1\xC1\x41\x48\xB8"
+ "\x98\xB7\x51\x6D\x18\xF6\x19\x82"
+ "\x37\x49\x88\xA4\xEF\x91\x21\x47"
+ "\x03\x28\xEA\x42\xF4\xFB\x7A\x58"
+ "\x28\x90\x77\x46\xD8\xD2\x35\x16"
+ "\x44\xA9\x9E\x49\x52\x2A\xE4\x16"
+ "\x5D\xF7\x65\xEB\x0F\xC9\x29\xE6"
+ "\xCF\x76\x91\x89\x8A\x94\x39\xFA"
+ "\x6B\x5F\x63\x53\x74\x43\x91\xF5"
+ "\x3F\xBC\x88\x53\xB2\x1A\x02\x3F"
+ "\x9D\x32\x84\xEB\x56\x28\xD6\x06"
+ "\xD5\xB2\x20\xA9\xFC\xC3\x76\x62"
+ "\x32\xCC\x86\xC8\x36\x67\x5E\x7E"
+ "\xA4\xAA\x15\x63\x6B\xA9\x86\xAF"
+ "\x1A\x52\x82\x36\x5F\xF4\x3F\x7A"
+ "\x9B\x78\x62\x3B\x02\x28\x60\xB3"
+ "\xBA\x82\xB1\xDD\xC9\x60\x8F\x47"
+ "\xF1\x6B\xFE\xE5\x39\x34\xA0\x28"
+ "\xA4\xB3\xC9\x7E\xED\x28\x8D\x70"
+ "\xB2\x1D\xFD\xC6\x00\xCF\x1A\x94"
+ "\x28\xF8\xC1\x34\xB7\x58\xA5\x6C"
+ "\x1A\x9D\xE4\xE4\xF6\xB9\xB4\xB0"
+ "\x5D\x51\x54\x9A\x53\xA0\xF9\x32"
+ "\xBD\x31\x54\x14\x7B\x33\xEE\x17"
+ "\xD3\xC7\x1F\x48\xBF\x0B\x22\xA2"
+ "\x7D\x0C\xDF\xD0\x2E\x98\xFA\xD2"
+ "\xFA\xCF\x24\x1D\x99\x9B\xD0\x7E"
+ "\xF4\x4F\x88\xFF\x45\x99\x4A\xF4"
+ "\xF2\x0A\x5B\x3B\x21\xAB\x92\xAE"
+ "\x40\x78\x91\x95\xC4\x2F\xA3\xE8"
+ "\x18\xC7\x07\xA6\xC8\xC0\x66\x33"
+ "\x35\xC0\xB4\xA0\xF8\xEE\x1E\xF3"
+ "\x40\xF5\x40\x54\xF1\x84\x8C\xEA"
+ "\x27\x38\x1F\xF8\x77\xC7\xDF\xD8"
+ "\x1D\xE2\xD9\x59\x40\x4F\x59\xD4"
+ "\xF8\x17\x99\x8D\x58\x2D\x72\x44"
+ "\x9D\x1D\x91\x64\xD6\x3F\x0A\x82"
+ "\xC7\x57\x3D\xEF\xD3\x41\xFA\xA7"
+ "\x68\xA3\xB8\xA5\x93\x74\x2E\x85"
+ "\x4C\x9D\x69\x59\xCE\x15\xAE\xBF"
+ "\x9C\x8F\x14\x64\x5D\x7F\xCF\x0B"
+ "\xCE\x43\x5D\x28\xC0\x2F\xFB\x18"
+ "\x79\x9A\xFC\x43\x16\x7C\x6B\x7B"
+ "\x38\xB8\x48\x36\x66\x4E\x20\x43"
+ "\xBA\x76\x13\x9A\xC3\xF2\xEB\x52"
+ "\xD7\xDC\xB2\x67\x63\x14\x25\xCD"
+ "\xB1\x13\x4B\xDE\x8C\x59\x21\x84"
+ "\x81\x8D\x97\x23\x45\x33\x7C\xF3"
+ "\xC5\xBC\x79\x95\xAA\x84\x68\x31"
+ "\x2D\x1A\x68\xFE\xEC\x92\x94\xDA"
+ "\x94\x2A\x6F\xD6\xFE\xE5\x76\x97"
+ "\xF4\x6E\xEE\xCB\x2B\x95\x4E\x36"
+ "\x5F\x74\x8C\x86\x5B\x71\xD0\x20"
+ "\x78\x1A\x7F\x18\x8C\xD9\xCD\xF5"
+ "\x21\x41\x56\x72\x13\xE1\x86\x07"
+ "\x07\x26\xF3\x4F\x7B\xEA\xB5\x18"
+ "\xFE\x94\x2D\x9F\xE0\x72\x18\x65"
+ "\xB2\xA5\x63\x48\xB4\x13\x22\xF7"
+ "\x25\xF1\x80\xA8\x7F\x54\x86\x7B"
+ "\x39\xAE\x95\x0C\x09\x32\x22\x2D"
+ "\x4D\x73\x39\x0C\x09\x2C\x7C\x10"
+ "\xD0\x4B\x53\xF6\x90\xC5\x99\x2F"
+ "\x15\xE1\x7F\xC6\xC5\x7A\x52\x14"
+ "\x65\xEE\x93\x54\xD0\x66\x15\x3C"
+ "\x4C\x68\xFD\x64\x0F\xF9\x10\x39"
+ "\x46\x7A\xDD\x97\x20\xEE\xC7\xD2"
+ "\x98\x4A\xB6\xE6\xF5\xA8\x1F\x4F"
+ "\xDB\xAB\x6D\xD5\x9B\x34\x16\x97"
+ "\x2F\x64\xE5\x37\xEF\x0E\xA1\xE9"
+ "\xBE\x31\x31\x96\x8B\x40\x18\x75"
+ "\x11\x75\x14\x32\xA5\x2D\x1B\x6B"
+ "\xDB\x59\xEB\xFA\x3D\x8E\x7C\xC4"
+ "\xDE\x68\xC8\x9F\xC9\x99\xE3\xC6"
+ "\x71\xB0\x12\x57\x89\x0D\xC0\x2B"
+ "\x9F\x12\x6A\x04\x67\xF1\x95\x31"
+ "\x59\xFD\x84\x95\x2C\x9C\x5B\xEC"
+ "\x09\xB0\x43\x96\x4A\x64\x80\x40"
+ "\xB9\x72\x19\xDD\x70\x42\xFA\xB1"
+ "\x4A\x2C\x0C\x0A\x60\x6E\xE3\x7C"
+ "\x37\x5A\xBE\xA4\x62\xCF\x29\xAB"
+ "\x7F\x4D\xA6\xB3\xE2\xB6\x64\xC6"
+ "\x33\x0B\xF3\xD5\x01\x38\x74\xA4"
+ "\x67\x1E\x75\x68\xC3\xAD\x76\xE9"
+ "\xE9\xBC\xF0\xEB\xD8\xFD\x31\x8A"
+ "\x5F\xC9\x18\x94\x4B\x86\x66\xFC"
+ "\xBD\x0B\x3D\xB3\x9F\xFA\x1F\xD9"
+ "\x78\xC4\xE3\x24\x1C\x67\xA2\xF8"
+ "\x43\xBC\x76\x75\xBF\x6C\x05\xB3"
+ "\x32\xE8\x7C\x80\xDB\xC7\xB6\x61"
+ "\x1A\x3E\x2B\xA7\x25\xED\x8F\xA0"
+ "\x00\x4B\xF8\x90\xCA\xD8\xFB\x12"
+ "\xAC\x1F\x18\xE9\xD2\x5E\xA2\x8E"
+ "\xE4\x84\x6B\x9D\xEB\x1E\x6B\xA3"
+ "\x7B\xDC\xCE\x15\x97\x27\xB2\x65"
+ "\xBC\x0E\x47\xAB\x55\x13\x53\xAB"
+ "\x0E\x34\x55\x02\x5F\x27\xC5\x89"
+ "\xDF\xC5\x70\xC4\xDD\x76\x82\xEE"
+ "\x68\xA6\x09\xB0\xE5\x5E\xF1\x0C"
+ "\xE3\xF3\x09\x9B\xFE\x65\x4B\xB8"
+ "\x30\xEC\xD5\x7C\x6A\xEC\x1D\xD2"
+ "\x93\xB7\xA1\x1A\x02\xD4\xC0\xD6"
+ "\x8D\x4D\x83\x9A\xED\x29\x4E\x14"
+ "\x86\xD5\x3C\x1A\xD5\xB9\x0A\x6A"
+ "\x72\x22\xD5\x92\x38\xF1\xA1\x86"
+ "\xB2\x41\x51\xCA\x4E\xAB\x8F\xD3"
+ "\x80\x56\xC3\xD7\x65\xE1\xB3\x86"
+ "\xCB\xCE\x98\xA1\xD4\x59\x1C\x06"
+ "\x01\xED\xF8\x29\x91\x19\x5C\x9A"
+ "\xEE\x28\x1B\x48\xD7\x32\xEF\x9F"
+ "\x6C\x2B\x66\x4E\x78\xD5\x8B\x72"
+ "\x80\xE7\x29\xDC\x23\x55\x98\x54"
+ "\xB1\xFF\x3E\x95\x56\xA8\x78\x78"
+ "\xEF\xC4\xA5\x11\x2D\x2B\xD8\x93"
+ "\x30\x6E\x7E\x51\xBB\x42\x5F\x03"
+ "\x43\x94\x23\x7E\xEE\xF0\xA5\x79"
+ "\x55\x01\xD4\x58\xB2\xF2\x85\x49"
+ "\x70\xC5\xB9\x0B\x3B\x7A\x6E\x6C",
+ .rlen = 1008,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 1008 - 16, 16 },
},
};
@@ -6864,6 +23434,2795 @@ static struct cipher_testvec camellia_cbc_dec_tv_template[] = {
"\x10\x11\x12\x13\x14\x15\x16\x17"
"\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f",
.rlen = 32,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\xCD\x3E\x2A\x3B\x3E\x94\xC5\x77"
+ "\xBA\xBB\x5B\xB1\xDE\x7B\xA4\x40"
+ "\x88\x39\xE3\xFD\x94\x4B\x25\x58"
+ "\xE1\x4B\xC4\x18\x7A\xFD\x17\x2B"
+ "\xB9\xF9\xC2\x27\x6A\xB6\x31\x27"
+ "\xA6\xAD\xEF\xE5\x5D\xE4\x02\x01"
+ "\x56\x2E\x10\xC2\x2C\xFF\xC6\x83"
+ "\xB5\xDC\x4F\x63\xAD\x0E\x63\x5E"
+ "\x56\xC8\x18\x3D\x79\x86\x97\xEF"
+ "\x57\x0E\x63\xA1\xC1\x41\x48\xB8"
+ "\x98\xB7\x51\x6D\x18\xF6\x19\x82"
+ "\x37\x49\x88\xA4\xEF\x91\x21\x47"
+ "\x03\x28\xEA\x42\xF4\xFB\x7A\x58"
+ "\x28\x90\x77\x46\xD8\xD2\x35\x16"
+ "\x44\xA9\x9E\x49\x52\x2A\xE4\x16"
+ "\x5D\xF7\x65\xEB\x0F\xC9\x29\xE6"
+ "\xCF\x76\x91\x89\x8A\x94\x39\xFA"
+ "\x6B\x5F\x63\x53\x74\x43\x91\xF5"
+ "\x3F\xBC\x88\x53\xB2\x1A\x02\x3F"
+ "\x9D\x32\x84\xEB\x56\x28\xD6\x06"
+ "\xD5\xB2\x20\xA9\xFC\xC3\x76\x62"
+ "\x32\xCC\x86\xC8\x36\x67\x5E\x7E"
+ "\xA4\xAA\x15\x63\x6B\xA9\x86\xAF"
+ "\x1A\x52\x82\x36\x5F\xF4\x3F\x7A"
+ "\x9B\x78\x62\x3B\x02\x28\x60\xB3"
+ "\xBA\x82\xB1\xDD\xC9\x60\x8F\x47"
+ "\xF1\x6B\xFE\xE5\x39\x34\xA0\x28"
+ "\xA4\xB3\xC9\x7E\xED\x28\x8D\x70"
+ "\xB2\x1D\xFD\xC6\x00\xCF\x1A\x94"
+ "\x28\xF8\xC1\x34\xB7\x58\xA5\x6C"
+ "\x1A\x9D\xE4\xE4\xF6\xB9\xB4\xB0"
+ "\x5D\x51\x54\x9A\x53\xA0\xF9\x32"
+ "\xBD\x31\x54\x14\x7B\x33\xEE\x17"
+ "\xD3\xC7\x1F\x48\xBF\x0B\x22\xA2"
+ "\x7D\x0C\xDF\xD0\x2E\x98\xFA\xD2"
+ "\xFA\xCF\x24\x1D\x99\x9B\xD0\x7E"
+ "\xF4\x4F\x88\xFF\x45\x99\x4A\xF4"
+ "\xF2\x0A\x5B\x3B\x21\xAB\x92\xAE"
+ "\x40\x78\x91\x95\xC4\x2F\xA3\xE8"
+ "\x18\xC7\x07\xA6\xC8\xC0\x66\x33"
+ "\x35\xC0\xB4\xA0\xF8\xEE\x1E\xF3"
+ "\x40\xF5\x40\x54\xF1\x84\x8C\xEA"
+ "\x27\x38\x1F\xF8\x77\xC7\xDF\xD8"
+ "\x1D\xE2\xD9\x59\x40\x4F\x59\xD4"
+ "\xF8\x17\x99\x8D\x58\x2D\x72\x44"
+ "\x9D\x1D\x91\x64\xD6\x3F\x0A\x82"
+ "\xC7\x57\x3D\xEF\xD3\x41\xFA\xA7"
+ "\x68\xA3\xB8\xA5\x93\x74\x2E\x85"
+ "\x4C\x9D\x69\x59\xCE\x15\xAE\xBF"
+ "\x9C\x8F\x14\x64\x5D\x7F\xCF\x0B"
+ "\xCE\x43\x5D\x28\xC0\x2F\xFB\x18"
+ "\x79\x9A\xFC\x43\x16\x7C\x6B\x7B"
+ "\x38\xB8\x48\x36\x66\x4E\x20\x43"
+ "\xBA\x76\x13\x9A\xC3\xF2\xEB\x52"
+ "\xD7\xDC\xB2\x67\x63\x14\x25\xCD"
+ "\xB1\x13\x4B\xDE\x8C\x59\x21\x84"
+ "\x81\x8D\x97\x23\x45\x33\x7C\xF3"
+ "\xC5\xBC\x79\x95\xAA\x84\x68\x31"
+ "\x2D\x1A\x68\xFE\xEC\x92\x94\xDA"
+ "\x94\x2A\x6F\xD6\xFE\xE5\x76\x97"
+ "\xF4\x6E\xEE\xCB\x2B\x95\x4E\x36"
+ "\x5F\x74\x8C\x86\x5B\x71\xD0\x20"
+ "\x78\x1A\x7F\x18\x8C\xD9\xCD\xF5"
+ "\x21\x41\x56\x72\x13\xE1\x86\x07"
+ "\x07\x26\xF3\x4F\x7B\xEA\xB5\x18"
+ "\xFE\x94\x2D\x9F\xE0\x72\x18\x65"
+ "\xB2\xA5\x63\x48\xB4\x13\x22\xF7"
+ "\x25\xF1\x80\xA8\x7F\x54\x86\x7B"
+ "\x39\xAE\x95\x0C\x09\x32\x22\x2D"
+ "\x4D\x73\x39\x0C\x09\x2C\x7C\x10"
+ "\xD0\x4B\x53\xF6\x90\xC5\x99\x2F"
+ "\x15\xE1\x7F\xC6\xC5\x7A\x52\x14"
+ "\x65\xEE\x93\x54\xD0\x66\x15\x3C"
+ "\x4C\x68\xFD\x64\x0F\xF9\x10\x39"
+ "\x46\x7A\xDD\x97\x20\xEE\xC7\xD2"
+ "\x98\x4A\xB6\xE6\xF5\xA8\x1F\x4F"
+ "\xDB\xAB\x6D\xD5\x9B\x34\x16\x97"
+ "\x2F\x64\xE5\x37\xEF\x0E\xA1\xE9"
+ "\xBE\x31\x31\x96\x8B\x40\x18\x75"
+ "\x11\x75\x14\x32\xA5\x2D\x1B\x6B"
+ "\xDB\x59\xEB\xFA\x3D\x8E\x7C\xC4"
+ "\xDE\x68\xC8\x9F\xC9\x99\xE3\xC6"
+ "\x71\xB0\x12\x57\x89\x0D\xC0\x2B"
+ "\x9F\x12\x6A\x04\x67\xF1\x95\x31"
+ "\x59\xFD\x84\x95\x2C\x9C\x5B\xEC"
+ "\x09\xB0\x43\x96\x4A\x64\x80\x40"
+ "\xB9\x72\x19\xDD\x70\x42\xFA\xB1"
+ "\x4A\x2C\x0C\x0A\x60\x6E\xE3\x7C"
+ "\x37\x5A\xBE\xA4\x62\xCF\x29\xAB"
+ "\x7F\x4D\xA6\xB3\xE2\xB6\x64\xC6"
+ "\x33\x0B\xF3\xD5\x01\x38\x74\xA4"
+ "\x67\x1E\x75\x68\xC3\xAD\x76\xE9"
+ "\xE9\xBC\xF0\xEB\xD8\xFD\x31\x8A"
+ "\x5F\xC9\x18\x94\x4B\x86\x66\xFC"
+ "\xBD\x0B\x3D\xB3\x9F\xFA\x1F\xD9"
+ "\x78\xC4\xE3\x24\x1C\x67\xA2\xF8"
+ "\x43\xBC\x76\x75\xBF\x6C\x05\xB3"
+ "\x32\xE8\x7C\x80\xDB\xC7\xB6\x61"
+ "\x1A\x3E\x2B\xA7\x25\xED\x8F\xA0"
+ "\x00\x4B\xF8\x90\xCA\xD8\xFB\x12"
+ "\xAC\x1F\x18\xE9\xD2\x5E\xA2\x8E"
+ "\xE4\x84\x6B\x9D\xEB\x1E\x6B\xA3"
+ "\x7B\xDC\xCE\x15\x97\x27\xB2\x65"
+ "\xBC\x0E\x47\xAB\x55\x13\x53\xAB"
+ "\x0E\x34\x55\x02\x5F\x27\xC5\x89"
+ "\xDF\xC5\x70\xC4\xDD\x76\x82\xEE"
+ "\x68\xA6\x09\xB0\xE5\x5E\xF1\x0C"
+ "\xE3\xF3\x09\x9B\xFE\x65\x4B\xB8"
+ "\x30\xEC\xD5\x7C\x6A\xEC\x1D\xD2"
+ "\x93\xB7\xA1\x1A\x02\xD4\xC0\xD6"
+ "\x8D\x4D\x83\x9A\xED\x29\x4E\x14"
+ "\x86\xD5\x3C\x1A\xD5\xB9\x0A\x6A"
+ "\x72\x22\xD5\x92\x38\xF1\xA1\x86"
+ "\xB2\x41\x51\xCA\x4E\xAB\x8F\xD3"
+ "\x80\x56\xC3\xD7\x65\xE1\xB3\x86"
+ "\xCB\xCE\x98\xA1\xD4\x59\x1C\x06"
+ "\x01\xED\xF8\x29\x91\x19\x5C\x9A"
+ "\xEE\x28\x1B\x48\xD7\x32\xEF\x9F"
+ "\x6C\x2B\x66\x4E\x78\xD5\x8B\x72"
+ "\x80\xE7\x29\xDC\x23\x55\x98\x54"
+ "\xB1\xFF\x3E\x95\x56\xA8\x78\x78"
+ "\xEF\xC4\xA5\x11\x2D\x2B\xD8\x93"
+ "\x30\x6E\x7E\x51\xBB\x42\x5F\x03"
+ "\x43\x94\x23\x7E\xEE\xF0\xA5\x79"
+ "\x55\x01\xD4\x58\xB2\xF2\x85\x49"
+ "\x70\xC5\xB9\x0B\x3B\x7A\x6E\x6C",
+ .ilen = 1008,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06"
+ "\x9D\x34\xCB\x3F\xD6\x6D\x04\x78"
+ "\x0F\xA6\x1A\xB1\x48\xDF\x53\xEA"
+ "\x81\x18\x8C\x23\xBA\x2E\xC5\x5C"
+ "\xF3\x67\xFE\x95\x09\xA0\x37\xCE"
+ "\x42\xD9\x70\x07\x7B\x12\xA9\x1D"
+ "\xB4\x4B\xE2\x56\xED\x84\x1B\x8F"
+ "\x26\xBD\x31\xC8\x5F\xF6\x6A\x01"
+ "\x98\x0C\xA3\x3A\xD1\x45\xDC\x73"
+ "\x0A\x7E\x15\xAC\x20\xB7\x4E\xE5"
+ "\x59\xF0\x87\x1E\x92\x29\xC0\x34"
+ "\xCB\x62\xF9\x6D\x04\x9B\x0F\xA6"
+ "\x3D\xD4\x48\xDF\x76\x0D\x81\x18"
+ "\xAF\x23\xBA\x51\xE8\x5C\xF3\x8A"
+ "\x21\x95\x2C\xC3\x37\xCE\x65\xFC"
+ "\x70\x07\x9E\x12\xA9\x40\xD7\x4B"
+ "\xE2\x79\x10\x84\x1B\xB2\x26\xBD"
+ "\x54\xEB\x5F\xF6\x8D\x01\x98\x2F"
+ "\xC6\x3A\xD1\x68\xFF\x73\x0A\xA1"
+ "\x15\xAC\x43\xDA\x4E\xE5\x7C\x13"
+ "\x87\x1E\xB5\x29\xC0\x57\xEE\x62"
+ "\xF9\x90\x04\x9B\x32\xC9\x3D\xD4"
+ "\x6B\x02\x76\x0D\xA4\x18\xAF\x46"
+ "\xDD\x51\xE8\x7F\x16\x8A\x21\xB8"
+ "\x2C\xC3\x5A\xF1\x65\xFC\x93\x07"
+ "\x9E\x35\xCC\x40\xD7\x6E\x05\x79"
+ "\x10\xA7\x1B\xB2\x49\xE0\x54\xEB"
+ "\x82\x19\x8D\x24\xBB\x2F\xC6\x5D"
+ "\xF4\x68\xFF\x96\x0A\xA1\x38\xCF"
+ "\x43\xDA\x71\x08\x7C\x13\xAA\x1E"
+ "\xB5\x4C\xE3\x57\xEE\x85\x1C\x90"
+ "\x27\xBE\x32\xC9\x60\xF7\x6B\x02"
+ "\x99\x0D\xA4\x3B\xD2\x46\xDD\x74"
+ "\x0B\x7F\x16\xAD\x21\xB8\x4F\xE6"
+ "\x5A\xF1\x88\x1F\x93\x2A\xC1\x35"
+ "\xCC\x63\xFA\x6E\x05\x9C\x10\xA7"
+ "\x3E\xD5\x49\xE0\x77\x0E\x82\x19"
+ "\xB0\x24\xBB\x52\xE9\x5D\xF4\x8B"
+ "\x22\x96\x2D\xC4\x38\xCF\x66\xFD"
+ "\x71\x08\x9F\x13\xAA\x41\xD8\x4C"
+ "\xE3\x7A\x11\x85\x1C\xB3\x27\xBE"
+ "\x55\xEC\x60\xF7\x8E\x02\x99\x30"
+ "\xC7\x3B\xD2\x69\x00\x74\x0B\xA2"
+ "\x16\xAD\x44\xDB\x4F\xE6\x7D\x14"
+ "\x88\x1F\xB6\x2A\xC1\x58\xEF\x63"
+ "\xFA\x91\x05\x9C\x33\xCA\x3E\xD5"
+ "\x6C\x03\x77\x0E\xA5\x19\xB0\x47"
+ "\xDE\x52\xE9\x80\x17\x8B\x22\xB9"
+ "\x2D\xC4\x5B\xF2\x66\xFD\x94\x08"
+ "\x9F\x36\xCD\x41\xD8\x6F\x06\x7A"
+ "\x11\xA8\x1C\xB3\x4A\xE1\x55\xEC"
+ "\x83\x1A\x8E\x25\xBC\x30\xC7\x5E"
+ "\xF5\x69\x00\x97\x0B\xA2\x39\xD0"
+ "\x44\xDB\x72\x09\x7D\x14\xAB\x1F"
+ "\xB6\x4D\xE4\x58\xEF\x86\x1D\x91"
+ "\x28\xBF\x33\xCA\x61\xF8\x6C\x03"
+ "\x9A\x0E\xA5\x3C\xD3\x47\xDE\x75"
+ "\x0C\x80\x17\xAE\x22\xB9\x50\xE7"
+ "\x5B\xF2\x89\x20\x94\x2B\xC2\x36"
+ "\xCD\x64\xFB\x6F\x06\x9D\x11\xA8"
+ "\x3F\xD6\x4A\xE1\x78\x0F\x83\x1A"
+ "\xB1\x25\xBC\x53\xEA\x5E\xF5\x8C"
+ "\x00\x97\x2E\xC5\x39\xD0\x67\xFE"
+ "\x72\x09\xA0\x14\xAB\x42\xD9\x4D",
+ .rlen = 1008,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 1008 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec camellia_ctr_enc_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .ilen = 496,
+ .result = "\xF3\x06\x3A\x84\xCD\xBA\x8E\x11"
+ "\xB7\x74\x6F\x5C\x97\xFB\x36\xFE"
+ "\xDE\x71\x58\xD4\x15\xD1\xC1\xA4"
+ "\xC9\x28\x74\xA6\x6B\xC7\x95\xA6"
+ "\x6C\x77\xF7\x2F\xDF\xC7\xBB\x85"
+ "\x60\xFC\xE8\x94\xE8\xB5\x09\x2C"
+ "\x1E\x43\xEF\x6C\xE9\x98\xC5\xA0"
+ "\x7B\x13\xE5\x7F\xF8\x49\x9A\x8C"
+ "\xE6\x7B\x08\xC3\x32\x66\x55\x4E"
+ "\xA5\x44\x1D\x2C\x18\xC7\x29\x1F"
+ "\x61\x28\x4A\xE3\xCD\xE5\x47\xB2"
+ "\x82\x2F\x66\x83\x91\x51\xAE\xD7"
+ "\x1C\x91\x3C\x57\xE3\x1D\x5A\xC9"
+ "\xFD\xC5\x58\x58\xEF\xCC\x33\xC9"
+ "\x0F\xEA\x26\x32\xD1\x15\x19\x2D"
+ "\x25\xB4\x7F\xB0\xDF\xFB\x88\x60"
+ "\x4E\x4D\x06\x7D\xCC\x1F\xED\x3B"
+ "\x68\x84\xD5\xB3\x1B\xE7\xB9\xA1"
+ "\x68\x8B\x2C\x1A\x44\xDA\x63\xD3"
+ "\x29\xE9\x59\x32\x1F\x30\x1C\x43"
+ "\xEA\x3A\xA3\x6B\x54\x3C\xAA\x11"
+ "\xAD\x38\x20\xC9\xB9\x8A\x64\x66"
+ "\x5A\x07\x49\xDF\xA1\x9C\xF9\x76"
+ "\x36\x65\xB6\x81\x8F\x76\x09\xE5"
+ "\xEB\xD1\x29\xA4\xE4\xF4\x4C\xCD"
+ "\xAF\xFC\xB9\x16\xD9\xC3\x73\x6A"
+ "\x33\x12\xF8\x7E\xBC\xCC\x7D\x80"
+ "\xBF\x3C\x25\x06\x13\x84\xFA\x35"
+ "\xF7\x40\xFA\xA1\x44\x13\x70\xD8"
+ "\x01\xF9\x85\x15\x63\xEC\x7D\xB9"
+ "\x02\xD8\xBA\x41\x6C\x92\x68\x66"
+ "\x95\xDD\xD6\x42\xE7\xBB\xE1\xFD"
+ "\x28\x3E\x94\xB6\xBD\xA7\xBF\x47"
+ "\x58\x8D\xFF\x19\x30\x75\x0D\x48"
+ "\x94\xE9\xA6\xCD\xB3\x8E\x1E\xCD"
+ "\x59\xBC\x1A\xAC\x3C\x4F\xA9\xEB"
+ "\xF4\xA7\xE4\x75\x4A\x18\x40\xC9"
+ "\x1E\xEC\x06\x9C\x28\x4B\xF7\x2B"
+ "\xE2\xEF\xD6\x42\x2E\xBB\xFC\x0A"
+ "\x79\xA2\x99\x28\x93\x1B\x00\x57"
+ "\x35\x1E\x1A\x93\x90\xA4\x68\x95"
+ "\x5E\x57\x40\xD5\xA9\xAA\x19\x48"
+ "\xEC\xFF\x76\x77\xDC\x78\x89\x76"
+ "\xE5\x3B\x00\xEC\x58\x4D\xD1\xE3"
+ "\xC8\x6C\x2C\x45\x5E\x5F\xD9\x4E"
+ "\x71\xA5\x36\x6D\x03\xF1\xC7\xD5"
+ "\xF3\x63\xC0\xD8\xCB\x2B\xF1\xA8"
+ "\xB9\x2B\xE6\x0B\xB9\x65\x78\xA0"
+ "\xC4\x46\xE6\x9B\x8B\x43\x2D\xAB"
+ "\x70\xA6\xE0\x59\x1E\xAC\x9D\xE0"
+ "\x76\x44\x45\xF3\x24\x11\x57\x98"
+ "\x9A\x86\xB4\x12\x80\x28\x86\x20"
+ "\x23\x9D\x2D\xE9\x38\x32\xB1\xE1"
+ "\xCF\x0A\x23\x73\x7D\xC5\x80\x3D"
+ "\x9F\x6D\xA0\xD0\xEE\x93\x8A\x79"
+ "\x3A\xDD\x1D\xBB\x9E\x26\x5D\x01"
+ "\x44\xD0\xD4\x4E\xC3\xF1\xE4\x38"
+ "\x09\x62\x0A\x1A\x4E\xD2\x63\x0F"
+ "\x6E\x3E\xD2\xA4\x3A\xF4\xF3\xFF"
+ "\x7E\x42\xEC\xB6\x6F\x4D\x6B\x48"
+ "\xE6\xA6\x50\x80\x78\x9E\xF1\xB0"
+ "\x4D\xB2\x0D\x3D\xFC\x40\x25\x4D",
+ .rlen = 496,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06"
+ "\x9D\x34\xCB\x3F\xD6\x6D\x04\x78"
+ "\x0F\xA6\x1A\xB1\x48\xDF\x53\xEA"
+ "\x81\x18\x8C\x23\xBA\x2E\xC5\x5C"
+ "\xF3\x67\xFE\x95\x09\xA0\x37\xCE"
+ "\x42\xD9\x70\x07\x7B\x12\xA9\x1D"
+ "\xB4\x4B\xE2\x56\xED\x84\x1B\x8F"
+ "\x26\xBD\x31\xC8\x5F\xF6\x6A\x01"
+ "\x98\x0C\xA3\x3A\xD1\x45\xDC\x73"
+ "\x0A\x7E\x15\xAC\x20\xB7\x4E\xE5"
+ "\x59\xF0\x87\x1E\x92\x29\xC0\x34"
+ "\xCB\x62\xF9\x6D\x04\x9B\x0F\xA6"
+ "\x3D\xD4\x48\xDF\x76\x0D\x81\x18"
+ "\xAF\x23\xBA\x51\xE8\x5C\xF3\x8A"
+ "\x21\x95\x2C\xC3\x37\xCE\x65\xFC"
+ "\x70\x07\x9E\x12\xA9\x40\xD7\x4B"
+ "\xE2\x79\x10\x84\x1B\xB2\x26\xBD"
+ "\x54\xEB\x5F\xF6\x8D\x01\x98\x2F"
+ "\xC6\x3A\xD1\x68\xFF\x73\x0A\xA1"
+ "\x15\xAC\x43\xDA\x4E\xE5\x7C\x13"
+ "\x87\x1E\xB5\x29\xC0\x57\xEE\x62"
+ "\xF9\x90\x04\x9B\x32\xC9\x3D\xD4"
+ "\x6B\x02\x76\x0D\xA4\x18\xAF\x46"
+ "\xDD\x51\xE8\x7F\x16\x8A\x21\xB8"
+ "\x2C\xC3\x5A\xF1\x65\xFC\x93\x07"
+ "\x9E\x35\xCC\x40\xD7\x6E\x05\x79"
+ "\x10\xA7\x1B\xB2\x49\xE0\x54\xEB"
+ "\x82\x19\x8D\x24\xBB\x2F\xC6\x5D"
+ "\xF4\x68\xFF\x96\x0A\xA1\x38\xCF"
+ "\x43\xDA\x71\x08\x7C\x13\xAA\x1E"
+ "\xB5\x4C\xE3\x57\xEE\x85\x1C\x90"
+ "\x27\xBE\x32\xC9\x60\xF7\x6B\x02"
+ "\x99\x0D\xA4\x3B\xD2\x46\xDD\x74"
+ "\x0B\x7F\x16\xAD\x21\xB8\x4F\xE6"
+ "\x5A\xF1\x88\x1F\x93\x2A\xC1\x35"
+ "\xCC\x63\xFA\x6E\x05\x9C\x10\xA7"
+ "\x3E\xD5\x49\xE0\x77\x0E\x82\x19"
+ "\xB0\x24\xBB\x52\xE9\x5D\xF4\x8B"
+ "\x22\x96\x2D\xC4\x38\xCF\x66\xFD"
+ "\x71\x08\x9F\x13\xAA\x41\xD8\x4C"
+ "\xE3\x7A\x11\x85\x1C\xB3\x27\xBE"
+ "\x55\xEC\x60\xF7\x8E\x02\x99\x30"
+ "\xC7\x3B\xD2\x69\x00\x74\x0B\xA2"
+ "\x16\xAD\x44\xDB\x4F\xE6\x7D\x14"
+ "\x88\x1F\xB6\x2A\xC1\x58\xEF\x63"
+ "\xFA\x91\x05\x9C\x33\xCA\x3E\xD5"
+ "\x6C\x03\x77\x0E\xA5\x19\xB0\x47"
+ "\xDE\x52\xE9\x80\x17\x8B\x22\xB9"
+ "\x2D\xC4\x5B\xF2\x66\xFD\x94\x08"
+ "\x9F\x36\xCD\x41\xD8\x6F\x06\x7A"
+ "\x11\xA8\x1C\xB3\x4A\xE1\x55\xEC"
+ "\x83\x1A\x8E\x25\xBC\x30\xC7\x5E"
+ "\xF5\x69\x00\x97\x0B\xA2\x39\xD0"
+ "\x44\xDB\x72\x09\x7D\x14\xAB\x1F"
+ "\xB6\x4D\xE4\x58\xEF\x86\x1D\x91"
+ "\x28\xBF\x33\xCA\x61\xF8\x6C\x03"
+ "\x9A\x0E\xA5\x3C\xD3\x47\xDE\x75"
+ "\x0C\x80\x17\xAE\x22\xB9\x50\xE7"
+ "\x5B\xF2\x89\x20\x94\x2B\xC2\x36"
+ "\xCD\x64\xFB\x6F\x06\x9D\x11\xA8"
+ "\x3F\xD6\x4A\xE1\x78\x0F\x83\x1A"
+ "\xB1\x25\xBC\x53\xEA\x5E\xF5\x8C"
+ "\x00\x97\x2E\xC5\x39\xD0\x67\xFE"
+ "\x72\x09\xA0\x14\xAB\x42\xD9\x4D"
+ "\xE4\x7B\x12",
+ .ilen = 1011,
+ .result = "\xF3\x06\x3A\x84\xCD\xBA\x8E\x11"
+ "\xB7\x74\x6F\x5C\x97\xFB\x36\xFE"
+ "\xDE\x71\x58\xD4\x15\xD1\xC1\xA4"
+ "\xC9\x28\x74\xA6\x6B\xC7\x95\xA6"
+ "\x6C\x77\xF7\x2F\xDF\xC7\xBB\x85"
+ "\x60\xFC\xE8\x94\xE8\xB5\x09\x2C"
+ "\x1E\x43\xEF\x6C\xE9\x98\xC5\xA0"
+ "\x7B\x13\xE5\x7F\xF8\x49\x9A\x8C"
+ "\xE6\x7B\x08\xC3\x32\x66\x55\x4E"
+ "\xA5\x44\x1D\x2C\x18\xC7\x29\x1F"
+ "\x61\x28\x4A\xE3\xCD\xE5\x47\xB2"
+ "\x82\x2F\x66\x83\x91\x51\xAE\xD7"
+ "\x1C\x91\x3C\x57\xE3\x1D\x5A\xC9"
+ "\xFD\xC5\x58\x58\xEF\xCC\x33\xC9"
+ "\x0F\xEA\x26\x32\xD1\x15\x19\x2D"
+ "\x25\xB4\x7F\xB0\xDF\xFB\x88\x60"
+ "\x4E\x4D\x06\x7D\xCC\x1F\xED\x3B"
+ "\x68\x84\xD5\xB3\x1B\xE7\xB9\xA1"
+ "\x68\x8B\x2C\x1A\x44\xDA\x63\xD3"
+ "\x29\xE9\x59\x32\x1F\x30\x1C\x43"
+ "\xEA\x3A\xA3\x6B\x54\x3C\xAA\x11"
+ "\xAD\x38\x20\xC9\xB9\x8A\x64\x66"
+ "\x5A\x07\x49\xDF\xA1\x9C\xF9\x76"
+ "\x36\x65\xB6\x81\x8F\x76\x09\xE5"
+ "\xEB\xD1\x29\xA4\xE4\xF4\x4C\xCD"
+ "\xAF\xFC\xB9\x16\xD9\xC3\x73\x6A"
+ "\x33\x12\xF8\x7E\xBC\xCC\x7D\x80"
+ "\xBF\x3C\x25\x06\x13\x84\xFA\x35"
+ "\xF7\x40\xFA\xA1\x44\x13\x70\xD8"
+ "\x01\xF9\x85\x15\x63\xEC\x7D\xB9"
+ "\x02\xD8\xBA\x41\x6C\x92\x68\x66"
+ "\x95\xDD\xD6\x42\xE7\xBB\xE1\xFD"
+ "\x28\x3E\x94\xB6\xBD\xA7\xBF\x47"
+ "\x58\x8D\xFF\x19\x30\x75\x0D\x48"
+ "\x94\xE9\xA6\xCD\xB3\x8E\x1E\xCD"
+ "\x59\xBC\x1A\xAC\x3C\x4F\xA9\xEB"
+ "\xF4\xA7\xE4\x75\x4A\x18\x40\xC9"
+ "\x1E\xEC\x06\x9C\x28\x4B\xF7\x2B"
+ "\xE2\xEF\xD6\x42\x2E\xBB\xFC\x0A"
+ "\x79\xA2\x99\x28\x93\x1B\x00\x57"
+ "\x35\x1E\x1A\x93\x90\xA4\x68\x95"
+ "\x5E\x57\x40\xD5\xA9\xAA\x19\x48"
+ "\xEC\xFF\x76\x77\xDC\x78\x89\x76"
+ "\xE5\x3B\x00\xEC\x58\x4D\xD1\xE3"
+ "\xC8\x6C\x2C\x45\x5E\x5F\xD9\x4E"
+ "\x71\xA5\x36\x6D\x03\xF1\xC7\xD5"
+ "\xF3\x63\xC0\xD8\xCB\x2B\xF1\xA8"
+ "\xB9\x2B\xE6\x0B\xB9\x65\x78\xA0"
+ "\xC4\x46\xE6\x9B\x8B\x43\x2D\xAB"
+ "\x70\xA6\xE0\x59\x1E\xAC\x9D\xE0"
+ "\x76\x44\x45\xF3\x24\x11\x57\x98"
+ "\x9A\x86\xB4\x12\x80\x28\x86\x20"
+ "\x23\x9D\x2D\xE9\x38\x32\xB1\xE1"
+ "\xCF\x0A\x23\x73\x7D\xC5\x80\x3D"
+ "\x9F\x6D\xA0\xD0\xEE\x93\x8A\x79"
+ "\x3A\xDD\x1D\xBB\x9E\x26\x5D\x01"
+ "\x44\xD0\xD4\x4E\xC3\xF1\xE4\x38"
+ "\x09\x62\x0A\x1A\x4E\xD2\x63\x0F"
+ "\x6E\x3E\xD2\xA4\x3A\xF4\xF3\xFF"
+ "\x7E\x42\xEC\xB6\x6F\x4D\x6B\x48"
+ "\xE6\xA6\x50\x80\x78\x9E\xF1\xB0"
+ "\x4D\xB2\x0D\x3D\xFC\x40\x25\x4D"
+ "\x93\x11\x1C\xE9\xD2\x9F\x6E\x90"
+ "\xE5\x41\x4A\xE2\x3C\x45\x29\x35"
+ "\xEC\xD6\x47\x50\xCB\x7B\xA2\x32"
+ "\xF7\x8B\x62\xF1\xE3\x9A\xFE\xC7"
+ "\x1D\x8C\x02\x72\x68\x09\xE9\xB6"
+ "\x4A\x80\xE6\xB1\x56\xDF\x90\xD4"
+ "\x93\x74\xA4\xCE\x20\x23\xBF\x48"
+ "\xA5\xDE\x1B\xFA\x40\x69\x31\x98"
+ "\x62\x6E\xA5\xC7\xBF\x0C\x62\xE5"
+ "\x6D\xE1\x93\xF1\x83\x10\x1C\xCA"
+ "\xF6\x5C\x19\xF8\x90\x78\xCB\xE4"
+ "\x0B\x3A\xB5\xF8\x43\x86\xD3\x3F"
+ "\xBA\x83\x34\x3C\x42\xCC\x7D\x28"
+ "\x29\x63\x4F\xD8\x02\x17\xC5\x07"
+ "\x2C\xA4\xAC\x79\xCB\xC3\xA9\x09"
+ "\x81\x45\x18\xED\xE4\xCB\x42\x3B"
+ "\x87\x2D\x23\xDC\xC5\xBA\x45\xBD"
+ "\x92\xE5\x02\x97\x96\xCE\xAD\xEC"
+ "\xBA\xD8\x76\xF8\xCA\xC1\x31\xEC"
+ "\x1E\x4F\x3F\x83\xF8\x33\xE8\x6E"
+ "\xCC\xF8\x5F\xDD\x65\x50\x99\x69"
+ "\xAF\x48\xCE\xA5\xBA\xB6\x14\x9F"
+ "\x05\x93\xB2\xE6\x59\xC8\x28\xFE"
+ "\x8F\x37\xF9\x64\xB9\xA5\x56\x8F"
+ "\xF1\x1B\x90\xEF\xAE\xEB\xFC\x09"
+ "\x11\x7A\xF2\x19\x0A\x0A\x9A\x3C"
+ "\xE2\x5E\x29\xFA\x31\x9B\xC1\x74"
+ "\x1E\x10\x3E\x07\xA9\x31\x6D\xF8"
+ "\x81\xF5\xD5\x8A\x04\x23\x51\xAC"
+ "\xA2\xE2\x63\xFD\x27\x1F\x79\x5B"
+ "\x1F\xE8\xDA\x11\x49\x4D\x1C\xBA"
+ "\x54\xCC\x0F\xBA\x92\x69\xE5\xCB"
+ "\x41\x1A\x67\xA6\x40\x82\x70\x8C"
+ "\x19\x79\x08\xA4\x51\x20\x7D\xC9"
+ "\x12\x27\xAE\x20\x0D\x2C\xA1\x6D"
+ "\xF4\x55\xD4\xE7\xE6\xD4\x28\x08"
+ "\x00\x70\x12\x56\x56\x50\xAD\x14"
+ "\x5C\x3E\xA2\xD1\x36\x3F\x36\x48"
+ "\xED\xB1\x57\x3E\x5D\x15\xF6\x1E"
+ "\x53\xE9\xA4\x3E\xED\x7D\xCF\x7D"
+ "\x29\xAF\xF3\x1E\x51\xA8\x9F\x85"
+ "\x8B\xF0\xBB\xCE\xCC\x39\xC3\x64"
+ "\x4B\xF2\xAD\x70\x19\xD4\x44\x8F"
+ "\x91\x76\xE8\x15\x66\x34\x9F\xF6"
+ "\x0F\x15\xA4\xA8\x24\xF8\x58\xB1"
+ "\x38\x46\x47\xC7\x9B\xCA\xE9\x42"
+ "\x44\xAA\xE6\xB5\x9C\x91\xA4\xD3"
+ "\x16\xA0\xED\x42\xBE\xB5\x06\x19"
+ "\xBE\x67\xE8\xBC\x22\x32\xA4\x1E"
+ "\x93\xEB\xBE\xE9\xE1\x93\xE5\x31"
+ "\x3A\xA2\x75\xDF\xE3\x6B\xE7\xCC"
+ "\xB4\x70\x20\xE0\x6D\x82\x7C\xC8"
+ "\x94\x5C\x5E\x37\x18\xAD\xED\x8B"
+ "\x44\x86\xCA\x5E\x07\xB7\x70\x8D"
+ "\x40\x48\x19\x73\x7C\x78\x64\x0B"
+ "\xDB\x01\xCA\xAE\x63\x19\xE9\xD1"
+ "\x6B\x2C\x84\x10\x45\x42\x2E\xC3"
+ "\xDF\x7F\xAA\xE8\x87\x1B\x63\x46"
+ "\x74\x28\x9D\x05\x30\x20\x62\x41"
+ "\xC0\x9F\x2C\x36\x2B\x78\xD7\x26"
+ "\xDF\x58\x51\xED\xFA\xDC\x87\x79"
+ "\xBF\x8C\xBF\xC4\x0F\xE5\x05\xDA"
+ "\x45\xE3\x35\x0D\x69\x91\x54\x1C"
+ "\xE7\x2C\x49\x08\x8B\x72\xFA\x5C"
+ "\xF1\x6B\xD9",
+ .rlen = 1011,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 1011 - 16, 16 },
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
+ "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06"
+ "\x9D\x34\xCB\x3F\xD6\x6D\x04\x78"
+ "\x0F\xA6\x1A\xB1\x48\xDF\x53\xEA"
+ "\x81\x18\x8C\x23\xBA\x2E\xC5\x5C"
+ "\xF3\x67\xFE\x95\x09\xA0\x37\xCE"
+ "\x42\xD9\x70\x07\x7B\x12\xA9\x1D"
+ "\xB4\x4B\xE2\x56\xED\x84\x1B\x8F"
+ "\x26\xBD\x31\xC8\x5F\xF6\x6A\x01"
+ "\x98\x0C\xA3\x3A\xD1\x45\xDC\x73"
+ "\x0A\x7E\x15\xAC\x20\xB7\x4E\xE5"
+ "\x59\xF0\x87\x1E\x92\x29\xC0\x34"
+ "\xCB\x62\xF9\x6D\x04\x9B\x0F\xA6"
+ "\x3D\xD4\x48\xDF\x76\x0D\x81\x18"
+ "\xAF\x23\xBA\x51\xE8\x5C\xF3\x8A"
+ "\x21\x95\x2C\xC3\x37\xCE\x65\xFC"
+ "\x70\x07\x9E\x12\xA9\x40\xD7\x4B"
+ "\xE2\x79\x10\x84\x1B\xB2\x26\xBD"
+ "\x54\xEB\x5F\xF6\x8D\x01\x98\x2F"
+ "\xC6\x3A\xD1\x68\xFF\x73\x0A\xA1"
+ "\x15\xAC\x43\xDA\x4E\xE5\x7C\x13"
+ "\x87\x1E\xB5\x29\xC0\x57\xEE\x62"
+ "\xF9\x90\x04\x9B\x32\xC9\x3D\xD4"
+ "\x6B\x02\x76\x0D\xA4\x18\xAF\x46"
+ "\xDD\x51\xE8\x7F\x16\x8A\x21\xB8"
+ "\x2C\xC3\x5A\xF1\x65\xFC\x93\x07"
+ "\x9E\x35\xCC\x40\xD7\x6E\x05\x79"
+ "\x10\xA7\x1B\xB2\x49\xE0\x54\xEB"
+ "\x82\x19\x8D\x24\xBB\x2F\xC6\x5D"
+ "\xF4\x68\xFF\x96\x0A\xA1\x38\xCF"
+ "\x43\xDA\x71\x08\x7C\x13\xAA\x1E"
+ "\xB5\x4C\xE3\x57\xEE\x85\x1C\x90"
+ "\x27\xBE\x32\xC9\x60\xF7\x6B\x02"
+ "\x99\x0D\xA4\x3B\xD2\x46\xDD\x74"
+ "\x0B\x7F\x16\xAD\x21\xB8\x4F\xE6"
+ "\x5A\xF1\x88\x1F\x93\x2A\xC1\x35"
+ "\xCC\x63\xFA\x6E\x05\x9C\x10\xA7"
+ "\x3E\xD5\x49\xE0\x77\x0E\x82\x19"
+ "\xB0\x24\xBB\x52\xE9\x5D\xF4\x8B"
+ "\x22\x96\x2D\xC4\x38\xCF\x66\xFD"
+ "\x71\x08\x9F\x13\xAA\x41\xD8\x4C"
+ "\xE3\x7A\x11\x85\x1C\xB3\x27\xBE"
+ "\x55\xEC\x60\xF7\x8E\x02\x99\x30"
+ "\xC7\x3B\xD2\x69\x00\x74\x0B\xA2"
+ "\x16\xAD\x44\xDB\x4F\xE6\x7D\x14"
+ "\x88\x1F\xB6\x2A\xC1\x58\xEF\x63"
+ "\xFA\x91\x05\x9C\x33\xCA\x3E\xD5"
+ "\x6C\x03\x77\x0E\xA5\x19\xB0\x47"
+ "\xDE\x52\xE9\x80\x17\x8B\x22\xB9"
+ "\x2D\xC4\x5B\xF2\x66\xFD\x94\x08"
+ "\x9F\x36\xCD\x41\xD8\x6F\x06\x7A"
+ "\x11\xA8\x1C\xB3\x4A\xE1\x55\xEC"
+ "\x83\x1A\x8E\x25\xBC\x30\xC7\x5E"
+ "\xF5\x69\x00\x97\x0B\xA2\x39\xD0"
+ "\x44\xDB\x72\x09\x7D\x14\xAB\x1F"
+ "\xB6\x4D\xE4\x58\xEF\x86\x1D\x91"
+ "\x28\xBF\x33\xCA\x61\xF8\x6C\x03"
+ "\x9A\x0E\xA5\x3C\xD3\x47\xDE\x75"
+ "\x0C\x80\x17\xAE\x22\xB9\x50\xE7"
+ "\x5B\xF2\x89\x20\x94\x2B\xC2\x36"
+ "\xCD\x64\xFB\x6F\x06\x9D\x11\xA8"
+ "\x3F\xD6\x4A\xE1\x78\x0F\x83\x1A"
+ "\xB1\x25\xBC\x53\xEA\x5E\xF5\x8C"
+ "\x00\x97\x2E\xC5\x39\xD0\x67\xFE"
+ "\x72\x09\xA0\x14\xAB\x42\xD9\x4D",
+ .ilen = 1008,
+ .result = "\x85\x79\x6C\x8B\x2B\x6D\x14\xF9"
+ "\xA6\x83\xB6\x80\x5B\x3A\xF3\x7E"
+ "\x30\x29\xEB\x1F\xDC\x19\x5F\xEB"
+ "\xF7\xC4\x27\x04\x51\x87\xD7\x6F"
+ "\xB8\x4E\x07\xFB\xAC\x3B\x08\xB4"
+ "\x4D\xCB\xE8\xE1\x71\x7D\x4F\x48"
+ "\xCD\x81\x64\xA5\xC4\x07\x1A\x9A"
+ "\x4B\x62\x90\x0E\xC8\xB3\x2B\x6B"
+ "\x8F\x9C\x6E\x72\x4B\xBA\xEF\x07"
+ "\x2C\x56\x07\x5E\x37\x30\x60\xA9"
+ "\xE3\xEF\xD6\x69\xE1\xA1\x77\x64"
+ "\x93\x75\x7A\xB7\x7A\x3B\xE9\x43"
+ "\x23\x35\x95\x91\x80\x8A\xC7\xCF"
+ "\xC3\xD5\xBF\xE7\xFE\x4C\x06\x6B"
+ "\x05\x19\x48\xE2\x62\xBA\x4F\xF2"
+ "\xFB\xEE\xE4\xCB\x79\x9D\xA3\x10"
+ "\x1D\x29\x8C\x1D\x7A\x88\x5A\xDD"
+ "\x4E\xB6\x18\xAA\xCD\xE6\x33\x96"
+ "\xD9\x0F\x90\x5A\x78\x76\x4D\x77"
+ "\x3C\x20\x89\x3B\xA3\xF9\x07\xFD"
+ "\xE4\xE8\x20\x2D\x15\x0A\x63\x49"
+ "\xF5\x4F\x89\xD8\xDE\xA1\x28\x78"
+ "\x28\x07\x09\x1B\x03\x94\x1D\x4B"
+ "\x82\x28\x1E\x1D\x95\xBA\xAC\x85"
+ "\x71\x6E\x3C\x18\x4B\x77\x74\x79"
+ "\xBF\x67\x0A\x53\x3C\x94\xD9\x60"
+ "\xE9\x6D\x40\x34\xA0\x2A\x53\x5D"
+ "\x27\xD5\x47\xF9\xC3\x4B\x27\x29"
+ "\xE4\x76\x9C\x3F\xA7\x1C\x87\xFC"
+ "\x6E\x0F\xCF\x9B\x60\xF0\xF0\x8B"
+ "\x70\x1C\x84\x81\x72\x4D\xB4\x98"
+ "\x23\x62\xE7\x6A\x2B\xFC\xA5\xB2"
+ "\xFF\xF5\x71\x07\xCD\x90\x23\x13"
+ "\x19\xD7\x79\x36\x6C\x9D\x55\x8B"
+ "\x93\x78\x86\x05\x69\x46\xD0\xC5"
+ "\x39\x09\xEB\x79\xEF\xFA\x9F\xAE"
+ "\xF3\xD5\x44\xC3\xFD\x86\xD2\x7C"
+ "\x83\x4B\xD8\x75\x9C\x18\x04\x7B"
+ "\x73\xAD\x72\xA4\xF6\xAB\xCF\x4B"
+ "\xCC\x01\x45\x90\xA6\x43\x05\x0C"
+ "\x6C\x4F\x62\x77\x57\x97\x9F\xEE"
+ "\x75\xA7\x3C\x38\xD1\x0F\x3D\x0E"
+ "\x2C\x43\x98\xFB\x13\x65\x73\xE4"
+ "\x3C\x1E\xD6\x90\x08\xF7\xE0\x99"
+ "\x3B\xF1\x9D\x6C\x48\xA9\x0E\x32"
+ "\x17\xC2\xCC\x20\xA1\x19\x26\xAA"
+ "\xE0\x75\x2F\xFB\x54\x66\x0A\xDF"
+ "\xB5\xF2\x1F\xC1\x34\x3C\x30\x56"
+ "\xE8\xDC\xF7\x92\x6B\xBF\x17\x24"
+ "\xEC\x94\xB5\x3B\xD6\xCE\xA2\x54"
+ "\x10\x7F\x50\xDE\x69\x77\xD5\x37"
+ "\xFE\x9C\x10\x83\xC5\xEB\xC9\x53"
+ "\xB7\xF3\xC4\x20\xAF\x0A\x7E\x57"
+ "\x3A\xE6\x75\xFE\x89\x00\x6E\x48"
+ "\xFB\x99\x17\x2C\xF6\x64\x40\x95"
+ "\x5E\xDC\x7A\xA6\x70\xC7\xF4\xDD"
+ "\x52\x05\x24\x34\xF9\x0E\xC8\x64"
+ "\x6D\xE2\xD8\x80\x53\x31\x4C\xFE"
+ "\xB4\x3A\x5F\x19\xCF\x42\x1B\x22"
+ "\x0B\x2D\x7B\xF1\xC5\x43\xF7\x5E"
+ "\x12\xA8\x01\x64\x16\x0B\x26\x5A"
+ "\x0C\x95\x0F\x40\xC5\x5A\x06\x7C"
+ "\xCF\xF5\xD5\xB7\x7A\x34\x23\xB6"
+ "\xAA\x9E\xA8\x98\xA2\xF8\x3D\xD3"
+ "\x3F\x23\x69\x63\x56\x96\x45\xD6"
+ "\x74\x23\x1D\x5C\x63\xCC\xD8\x78"
+ "\x16\xE2\x9C\xD2\x80\x02\xF2\x28"
+ "\x69\x2F\xC4\xA8\x15\x15\x24\x3B"
+ "\xCB\xF0\x14\xE4\x62\xC8\xF3\xD1"
+ "\x03\x58\x1B\x33\x77\x74\x1F\xB4"
+ "\x07\x86\xF2\x21\xB7\x41\xAE\xBF"
+ "\x25\xC2\xFF\x51\xEF\xEA\xCE\xC4"
+ "\x5F\xD9\xB8\x18\x6A\xF0\x0F\x0D"
+ "\xF8\x04\xBB\x6D\x62\x33\x87\x26"
+ "\x4F\x2F\x14\x6E\xDC\xDB\x66\x09"
+ "\x2A\xEF\x7D\x84\x10\xAC\x82\x5E"
+ "\xD2\xE4\xAD\x74\x7A\x6D\xCC\x3A"
+ "\x7B\x62\xD8\xD6\x07\x2D\xF7\xDF"
+ "\x9B\xB3\x82\xCF\x9C\x1D\x76\x5C"
+ "\xAC\x7B\xD4\x9B\x45\xA1\x64\x11"
+ "\x66\xF1\xA7\x0B\xF9\xDD\x00\xDD"
+ "\xA4\x45\x3D\x3E\x03\xC9\x2E\xCB"
+ "\xC3\x14\x84\x72\xFD\x41\xDC\xBD"
+ "\x75\xBE\xA8\xE5\x16\x48\x64\x39"
+ "\xCA\xF3\xE6\xDC\x25\x24\xF1\x6D"
+ "\xB2\x8D\xC5\x38\x54\xD3\x5D\x6D"
+ "\x0B\x29\x10\x15\x0E\x13\x3B\xAC"
+ "\x7E\xCC\x9E\x3E\x18\x48\xA6\x02"
+ "\xEF\x03\xB2\x2E\xE3\xD2\x70\x21"
+ "\xB4\x19\x26\xBE\x3A\x3D\x05\xE0"
+ "\xF8\x09\xAF\xE4\x31\x26\x92\x2F"
+ "\x8F\x55\xAC\xED\x0B\xB2\xA5\x34"
+ "\xBE\x50\xB1\x02\x22\x96\xE3\x40"
+ "\x7B\x70\x50\x6E\x3B\xD5\xE5\xA0"
+ "\x8E\xA2\xAD\x14\x60\x5C\x7A\x2B"
+ "\x3D\x1B\x7F\xC1\xC0\x2C\x56\x36"
+ "\xD2\x0A\x32\x06\x97\x34\xB9\xF4"
+ "\x6F\x9F\x7E\x80\xD0\x9D\xF7\x6A"
+ "\x21\xC1\xA2\x6A\xB1\x96\x5B\x4D"
+ "\x7A\x15\x6C\xC4\x4E\xB8\xE0\x9E"
+ "\x6C\x50\xF3\x9C\xC9\xB5\x23\xB7"
+ "\xF1\xD4\x29\x4A\x23\xC4\xAD\x1E"
+ "\x2C\x07\xD2\x43\x5F\x57\x93\xCA"
+ "\x85\xF9\x9F\xAD\x4C\xF1\xE4\xB1"
+ "\x1A\x8E\x28\xA4\xB6\x52\x77\x7E"
+ "\x68\xC6\x47\xB9\x76\xCC\x65\x5F"
+ "\x0B\xF9\x67\x93\xD8\x0E\x9A\x37"
+ "\x5F\x41\xED\x64\x6C\xAD\x5F\xED"
+ "\x3F\x8D\xFB\x8E\x1E\xA0\xE4\x1F"
+ "\xC2\xC7\xED\x18\x43\xE1\x20\x86"
+ "\x5D\xBC\x30\x70\x22\xA1\xDC\x53"
+ "\x10\x3A\x8D\x47\x82\xCD\x7F\x59"
+ "\x03\x2D\x6D\xF5\xE7\x79\xD4\x07"
+ "\x68\x2A\xA5\x42\x19\x4D\xAF\xF5"
+ "\xED\x47\x83\xBC\x5F\x62\x84\xDA"
+ "\xDA\x41\xFF\xB0\x1D\x64\xA3\xC8"
+ "\xBD\x4E\xE0\xB8\x7F\xEE\x55\x0A"
+ "\x4E\x61\xB2\x51\xF6\x9C\x95\xF6"
+ "\x92\xBB\xF6\xC5\xF0\x09\x86\xDE"
+ "\x37\x9E\x29\xF9\x2A\x18\x73\x0D"
+ "\xDC\x7E\x6B\x7B\x1B\x43\x8C\xEA"
+ "\x13\xC8\x1A\x47\x0A\x2D\x6D\x56"
+ "\xCD\xD2\xE7\x53\x1A\xAB\x1C\x3C"
+ "\xC5\x9B\x03\x70\x29\x2A\x49\x09"
+ "\x67\xA1\xEA\xD6\x3A\x5B\xBF\x71"
+ "\x1D\x48\x64\x6C\xFB\xC0\x9E\x36",
+ .rlen = 1008,
+ },
+};
+
+static struct cipher_testvec camellia_ctr_dec_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\xF3\x06\x3A\x84\xCD\xBA\x8E\x11"
+ "\xB7\x74\x6F\x5C\x97\xFB\x36\xFE"
+ "\xDE\x71\x58\xD4\x15\xD1\xC1\xA4"
+ "\xC9\x28\x74\xA6\x6B\xC7\x95\xA6"
+ "\x6C\x77\xF7\x2F\xDF\xC7\xBB\x85"
+ "\x60\xFC\xE8\x94\xE8\xB5\x09\x2C"
+ "\x1E\x43\xEF\x6C\xE9\x98\xC5\xA0"
+ "\x7B\x13\xE5\x7F\xF8\x49\x9A\x8C"
+ "\xE6\x7B\x08\xC3\x32\x66\x55\x4E"
+ "\xA5\x44\x1D\x2C\x18\xC7\x29\x1F"
+ "\x61\x28\x4A\xE3\xCD\xE5\x47\xB2"
+ "\x82\x2F\x66\x83\x91\x51\xAE\xD7"
+ "\x1C\x91\x3C\x57\xE3\x1D\x5A\xC9"
+ "\xFD\xC5\x58\x58\xEF\xCC\x33\xC9"
+ "\x0F\xEA\x26\x32\xD1\x15\x19\x2D"
+ "\x25\xB4\x7F\xB0\xDF\xFB\x88\x60"
+ "\x4E\x4D\x06\x7D\xCC\x1F\xED\x3B"
+ "\x68\x84\xD5\xB3\x1B\xE7\xB9\xA1"
+ "\x68\x8B\x2C\x1A\x44\xDA\x63\xD3"
+ "\x29\xE9\x59\x32\x1F\x30\x1C\x43"
+ "\xEA\x3A\xA3\x6B\x54\x3C\xAA\x11"
+ "\xAD\x38\x20\xC9\xB9\x8A\x64\x66"
+ "\x5A\x07\x49\xDF\xA1\x9C\xF9\x76"
+ "\x36\x65\xB6\x81\x8F\x76\x09\xE5"
+ "\xEB\xD1\x29\xA4\xE4\xF4\x4C\xCD"
+ "\xAF\xFC\xB9\x16\xD9\xC3\x73\x6A"
+ "\x33\x12\xF8\x7E\xBC\xCC\x7D\x80"
+ "\xBF\x3C\x25\x06\x13\x84\xFA\x35"
+ "\xF7\x40\xFA\xA1\x44\x13\x70\xD8"
+ "\x01\xF9\x85\x15\x63\xEC\x7D\xB9"
+ "\x02\xD8\xBA\x41\x6C\x92\x68\x66"
+ "\x95\xDD\xD6\x42\xE7\xBB\xE1\xFD"
+ "\x28\x3E\x94\xB6\xBD\xA7\xBF\x47"
+ "\x58\x8D\xFF\x19\x30\x75\x0D\x48"
+ "\x94\xE9\xA6\xCD\xB3\x8E\x1E\xCD"
+ "\x59\xBC\x1A\xAC\x3C\x4F\xA9\xEB"
+ "\xF4\xA7\xE4\x75\x4A\x18\x40\xC9"
+ "\x1E\xEC\x06\x9C\x28\x4B\xF7\x2B"
+ "\xE2\xEF\xD6\x42\x2E\xBB\xFC\x0A"
+ "\x79\xA2\x99\x28\x93\x1B\x00\x57"
+ "\x35\x1E\x1A\x93\x90\xA4\x68\x95"
+ "\x5E\x57\x40\xD5\xA9\xAA\x19\x48"
+ "\xEC\xFF\x76\x77\xDC\x78\x89\x76"
+ "\xE5\x3B\x00\xEC\x58\x4D\xD1\xE3"
+ "\xC8\x6C\x2C\x45\x5E\x5F\xD9\x4E"
+ "\x71\xA5\x36\x6D\x03\xF1\xC7\xD5"
+ "\xF3\x63\xC0\xD8\xCB\x2B\xF1\xA8"
+ "\xB9\x2B\xE6\x0B\xB9\x65\x78\xA0"
+ "\xC4\x46\xE6\x9B\x8B\x43\x2D\xAB"
+ "\x70\xA6\xE0\x59\x1E\xAC\x9D\xE0"
+ "\x76\x44\x45\xF3\x24\x11\x57\x98"
+ "\x9A\x86\xB4\x12\x80\x28\x86\x20"
+ "\x23\x9D\x2D\xE9\x38\x32\xB1\xE1"
+ "\xCF\x0A\x23\x73\x7D\xC5\x80\x3D"
+ "\x9F\x6D\xA0\xD0\xEE\x93\x8A\x79"
+ "\x3A\xDD\x1D\xBB\x9E\x26\x5D\x01"
+ "\x44\xD0\xD4\x4E\xC3\xF1\xE4\x38"
+ "\x09\x62\x0A\x1A\x4E\xD2\x63\x0F"
+ "\x6E\x3E\xD2\xA4\x3A\xF4\xF3\xFF"
+ "\x7E\x42\xEC\xB6\x6F\x4D\x6B\x48"
+ "\xE6\xA6\x50\x80\x78\x9E\xF1\xB0"
+ "\x4D\xB2\x0D\x3D\xFC\x40\x25\x4D",
+ .ilen = 496,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7",
+ .rlen = 496,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\xF3\x06\x3A\x84\xCD\xBA\x8E\x11"
+ "\xB7\x74\x6F\x5C\x97\xFB\x36\xFE"
+ "\xDE\x71\x58\xD4\x15\xD1\xC1\xA4"
+ "\xC9\x28\x74\xA6\x6B\xC7\x95\xA6"
+ "\x6C\x77\xF7\x2F\xDF\xC7\xBB\x85"
+ "\x60\xFC\xE8\x94\xE8\xB5\x09\x2C"
+ "\x1E\x43\xEF\x6C\xE9\x98\xC5\xA0"
+ "\x7B\x13\xE5\x7F\xF8\x49\x9A\x8C"
+ "\xE6\x7B\x08\xC3\x32\x66\x55\x4E"
+ "\xA5\x44\x1D\x2C\x18\xC7\x29\x1F"
+ "\x61\x28\x4A\xE3\xCD\xE5\x47\xB2"
+ "\x82\x2F\x66\x83\x91\x51\xAE\xD7"
+ "\x1C\x91\x3C\x57\xE3\x1D\x5A\xC9"
+ "\xFD\xC5\x58\x58\xEF\xCC\x33\xC9"
+ "\x0F\xEA\x26\x32\xD1\x15\x19\x2D"
+ "\x25\xB4\x7F\xB0\xDF\xFB\x88\x60"
+ "\x4E\x4D\x06\x7D\xCC\x1F\xED\x3B"
+ "\x68\x84\xD5\xB3\x1B\xE7\xB9\xA1"
+ "\x68\x8B\x2C\x1A\x44\xDA\x63\xD3"
+ "\x29\xE9\x59\x32\x1F\x30\x1C\x43"
+ "\xEA\x3A\xA3\x6B\x54\x3C\xAA\x11"
+ "\xAD\x38\x20\xC9\xB9\x8A\x64\x66"
+ "\x5A\x07\x49\xDF\xA1\x9C\xF9\x76"
+ "\x36\x65\xB6\x81\x8F\x76\x09\xE5"
+ "\xEB\xD1\x29\xA4\xE4\xF4\x4C\xCD"
+ "\xAF\xFC\xB9\x16\xD9\xC3\x73\x6A"
+ "\x33\x12\xF8\x7E\xBC\xCC\x7D\x80"
+ "\xBF\x3C\x25\x06\x13\x84\xFA\x35"
+ "\xF7\x40\xFA\xA1\x44\x13\x70\xD8"
+ "\x01\xF9\x85\x15\x63\xEC\x7D\xB9"
+ "\x02\xD8\xBA\x41\x6C\x92\x68\x66"
+ "\x95\xDD\xD6\x42\xE7\xBB\xE1\xFD"
+ "\x28\x3E\x94\xB6\xBD\xA7\xBF\x47"
+ "\x58\x8D\xFF\x19\x30\x75\x0D\x48"
+ "\x94\xE9\xA6\xCD\xB3\x8E\x1E\xCD"
+ "\x59\xBC\x1A\xAC\x3C\x4F\xA9\xEB"
+ "\xF4\xA7\xE4\x75\x4A\x18\x40\xC9"
+ "\x1E\xEC\x06\x9C\x28\x4B\xF7\x2B"
+ "\xE2\xEF\xD6\x42\x2E\xBB\xFC\x0A"
+ "\x79\xA2\x99\x28\x93\x1B\x00\x57"
+ "\x35\x1E\x1A\x93\x90\xA4\x68\x95"
+ "\x5E\x57\x40\xD5\xA9\xAA\x19\x48"
+ "\xEC\xFF\x76\x77\xDC\x78\x89\x76"
+ "\xE5\x3B\x00\xEC\x58\x4D\xD1\xE3"
+ "\xC8\x6C\x2C\x45\x5E\x5F\xD9\x4E"
+ "\x71\xA5\x36\x6D\x03\xF1\xC7\xD5"
+ "\xF3\x63\xC0\xD8\xCB\x2B\xF1\xA8"
+ "\xB9\x2B\xE6\x0B\xB9\x65\x78\xA0"
+ "\xC4\x46\xE6\x9B\x8B\x43\x2D\xAB"
+ "\x70\xA6\xE0\x59\x1E\xAC\x9D\xE0"
+ "\x76\x44\x45\xF3\x24\x11\x57\x98"
+ "\x9A\x86\xB4\x12\x80\x28\x86\x20"
+ "\x23\x9D\x2D\xE9\x38\x32\xB1\xE1"
+ "\xCF\x0A\x23\x73\x7D\xC5\x80\x3D"
+ "\x9F\x6D\xA0\xD0\xEE\x93\x8A\x79"
+ "\x3A\xDD\x1D\xBB\x9E\x26\x5D\x01"
+ "\x44\xD0\xD4\x4E\xC3\xF1\xE4\x38"
+ "\x09\x62\x0A\x1A\x4E\xD2\x63\x0F"
+ "\x6E\x3E\xD2\xA4\x3A\xF4\xF3\xFF"
+ "\x7E\x42\xEC\xB6\x6F\x4D\x6B\x48"
+ "\xE6\xA6\x50\x80\x78\x9E\xF1\xB0"
+ "\x4D\xB2\x0D\x3D\xFC\x40\x25\x4D"
+ "\x93\x11\x1C\xE9\xD2\x9F\x6E\x90"
+ "\xE5\x41\x4A\xE2\x3C\x45\x29\x35"
+ "\xEC\xD6\x47\x50\xCB\x7B\xA2\x32"
+ "\xF7\x8B\x62\xF1\xE3\x9A\xFE\xC7"
+ "\x1D\x8C\x02\x72\x68\x09\xE9\xB6"
+ "\x4A\x80\xE6\xB1\x56\xDF\x90\xD4"
+ "\x93\x74\xA4\xCE\x20\x23\xBF\x48"
+ "\xA5\xDE\x1B\xFA\x40\x69\x31\x98"
+ "\x62\x6E\xA5\xC7\xBF\x0C\x62\xE5"
+ "\x6D\xE1\x93\xF1\x83\x10\x1C\xCA"
+ "\xF6\x5C\x19\xF8\x90\x78\xCB\xE4"
+ "\x0B\x3A\xB5\xF8\x43\x86\xD3\x3F"
+ "\xBA\x83\x34\x3C\x42\xCC\x7D\x28"
+ "\x29\x63\x4F\xD8\x02\x17\xC5\x07"
+ "\x2C\xA4\xAC\x79\xCB\xC3\xA9\x09"
+ "\x81\x45\x18\xED\xE4\xCB\x42\x3B"
+ "\x87\x2D\x23\xDC\xC5\xBA\x45\xBD"
+ "\x92\xE5\x02\x97\x96\xCE\xAD\xEC"
+ "\xBA\xD8\x76\xF8\xCA\xC1\x31\xEC"
+ "\x1E\x4F\x3F\x83\xF8\x33\xE8\x6E"
+ "\xCC\xF8\x5F\xDD\x65\x50\x99\x69"
+ "\xAF\x48\xCE\xA5\xBA\xB6\x14\x9F"
+ "\x05\x93\xB2\xE6\x59\xC8\x28\xFE"
+ "\x8F\x37\xF9\x64\xB9\xA5\x56\x8F"
+ "\xF1\x1B\x90\xEF\xAE\xEB\xFC\x09"
+ "\x11\x7A\xF2\x19\x0A\x0A\x9A\x3C"
+ "\xE2\x5E\x29\xFA\x31\x9B\xC1\x74"
+ "\x1E\x10\x3E\x07\xA9\x31\x6D\xF8"
+ "\x81\xF5\xD5\x8A\x04\x23\x51\xAC"
+ "\xA2\xE2\x63\xFD\x27\x1F\x79\x5B"
+ "\x1F\xE8\xDA\x11\x49\x4D\x1C\xBA"
+ "\x54\xCC\x0F\xBA\x92\x69\xE5\xCB"
+ "\x41\x1A\x67\xA6\x40\x82\x70\x8C"
+ "\x19\x79\x08\xA4\x51\x20\x7D\xC9"
+ "\x12\x27\xAE\x20\x0D\x2C\xA1\x6D"
+ "\xF4\x55\xD4\xE7\xE6\xD4\x28\x08"
+ "\x00\x70\x12\x56\x56\x50\xAD\x14"
+ "\x5C\x3E\xA2\xD1\x36\x3F\x36\x48"
+ "\xED\xB1\x57\x3E\x5D\x15\xF6\x1E"
+ "\x53\xE9\xA4\x3E\xED\x7D\xCF\x7D"
+ "\x29\xAF\xF3\x1E\x51\xA8\x9F\x85"
+ "\x8B\xF0\xBB\xCE\xCC\x39\xC3\x64"
+ "\x4B\xF2\xAD\x70\x19\xD4\x44\x8F"
+ "\x91\x76\xE8\x15\x66\x34\x9F\xF6"
+ "\x0F\x15\xA4\xA8\x24\xF8\x58\xB1"
+ "\x38\x46\x47\xC7\x9B\xCA\xE9\x42"
+ "\x44\xAA\xE6\xB5\x9C\x91\xA4\xD3"
+ "\x16\xA0\xED\x42\xBE\xB5\x06\x19"
+ "\xBE\x67\xE8\xBC\x22\x32\xA4\x1E"
+ "\x93\xEB\xBE\xE9\xE1\x93\xE5\x31"
+ "\x3A\xA2\x75\xDF\xE3\x6B\xE7\xCC"
+ "\xB4\x70\x20\xE0\x6D\x82\x7C\xC8"
+ "\x94\x5C\x5E\x37\x18\xAD\xED\x8B"
+ "\x44\x86\xCA\x5E\x07\xB7\x70\x8D"
+ "\x40\x48\x19\x73\x7C\x78\x64\x0B"
+ "\xDB\x01\xCA\xAE\x63\x19\xE9\xD1"
+ "\x6B\x2C\x84\x10\x45\x42\x2E\xC3"
+ "\xDF\x7F\xAA\xE8\x87\x1B\x63\x46"
+ "\x74\x28\x9D\x05\x30\x20\x62\x41"
+ "\xC0\x9F\x2C\x36\x2B\x78\xD7\x26"
+ "\xDF\x58\x51\xED\xFA\xDC\x87\x79"
+ "\xBF\x8C\xBF\xC4\x0F\xE5\x05\xDA"
+ "\x45\xE3\x35\x0D\x69\x91\x54\x1C"
+ "\xE7\x2C\x49\x08\x8B\x72\xFA\x5C"
+ "\xF1\x6B\xD9",
+ .ilen = 1011,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06"
+ "\x9D\x34\xCB\x3F\xD6\x6D\x04\x78"
+ "\x0F\xA6\x1A\xB1\x48\xDF\x53\xEA"
+ "\x81\x18\x8C\x23\xBA\x2E\xC5\x5C"
+ "\xF3\x67\xFE\x95\x09\xA0\x37\xCE"
+ "\x42\xD9\x70\x07\x7B\x12\xA9\x1D"
+ "\xB4\x4B\xE2\x56\xED\x84\x1B\x8F"
+ "\x26\xBD\x31\xC8\x5F\xF6\x6A\x01"
+ "\x98\x0C\xA3\x3A\xD1\x45\xDC\x73"
+ "\x0A\x7E\x15\xAC\x20\xB7\x4E\xE5"
+ "\x59\xF0\x87\x1E\x92\x29\xC0\x34"
+ "\xCB\x62\xF9\x6D\x04\x9B\x0F\xA6"
+ "\x3D\xD4\x48\xDF\x76\x0D\x81\x18"
+ "\xAF\x23\xBA\x51\xE8\x5C\xF3\x8A"
+ "\x21\x95\x2C\xC3\x37\xCE\x65\xFC"
+ "\x70\x07\x9E\x12\xA9\x40\xD7\x4B"
+ "\xE2\x79\x10\x84\x1B\xB2\x26\xBD"
+ "\x54\xEB\x5F\xF6\x8D\x01\x98\x2F"
+ "\xC6\x3A\xD1\x68\xFF\x73\x0A\xA1"
+ "\x15\xAC\x43\xDA\x4E\xE5\x7C\x13"
+ "\x87\x1E\xB5\x29\xC0\x57\xEE\x62"
+ "\xF9\x90\x04\x9B\x32\xC9\x3D\xD4"
+ "\x6B\x02\x76\x0D\xA4\x18\xAF\x46"
+ "\xDD\x51\xE8\x7F\x16\x8A\x21\xB8"
+ "\x2C\xC3\x5A\xF1\x65\xFC\x93\x07"
+ "\x9E\x35\xCC\x40\xD7\x6E\x05\x79"
+ "\x10\xA7\x1B\xB2\x49\xE0\x54\xEB"
+ "\x82\x19\x8D\x24\xBB\x2F\xC6\x5D"
+ "\xF4\x68\xFF\x96\x0A\xA1\x38\xCF"
+ "\x43\xDA\x71\x08\x7C\x13\xAA\x1E"
+ "\xB5\x4C\xE3\x57\xEE\x85\x1C\x90"
+ "\x27\xBE\x32\xC9\x60\xF7\x6B\x02"
+ "\x99\x0D\xA4\x3B\xD2\x46\xDD\x74"
+ "\x0B\x7F\x16\xAD\x21\xB8\x4F\xE6"
+ "\x5A\xF1\x88\x1F\x93\x2A\xC1\x35"
+ "\xCC\x63\xFA\x6E\x05\x9C\x10\xA7"
+ "\x3E\xD5\x49\xE0\x77\x0E\x82\x19"
+ "\xB0\x24\xBB\x52\xE9\x5D\xF4\x8B"
+ "\x22\x96\x2D\xC4\x38\xCF\x66\xFD"
+ "\x71\x08\x9F\x13\xAA\x41\xD8\x4C"
+ "\xE3\x7A\x11\x85\x1C\xB3\x27\xBE"
+ "\x55\xEC\x60\xF7\x8E\x02\x99\x30"
+ "\xC7\x3B\xD2\x69\x00\x74\x0B\xA2"
+ "\x16\xAD\x44\xDB\x4F\xE6\x7D\x14"
+ "\x88\x1F\xB6\x2A\xC1\x58\xEF\x63"
+ "\xFA\x91\x05\x9C\x33\xCA\x3E\xD5"
+ "\x6C\x03\x77\x0E\xA5\x19\xB0\x47"
+ "\xDE\x52\xE9\x80\x17\x8B\x22\xB9"
+ "\x2D\xC4\x5B\xF2\x66\xFD\x94\x08"
+ "\x9F\x36\xCD\x41\xD8\x6F\x06\x7A"
+ "\x11\xA8\x1C\xB3\x4A\xE1\x55\xEC"
+ "\x83\x1A\x8E\x25\xBC\x30\xC7\x5E"
+ "\xF5\x69\x00\x97\x0B\xA2\x39\xD0"
+ "\x44\xDB\x72\x09\x7D\x14\xAB\x1F"
+ "\xB6\x4D\xE4\x58\xEF\x86\x1D\x91"
+ "\x28\xBF\x33\xCA\x61\xF8\x6C\x03"
+ "\x9A\x0E\xA5\x3C\xD3\x47\xDE\x75"
+ "\x0C\x80\x17\xAE\x22\xB9\x50\xE7"
+ "\x5B\xF2\x89\x20\x94\x2B\xC2\x36"
+ "\xCD\x64\xFB\x6F\x06\x9D\x11\xA8"
+ "\x3F\xD6\x4A\xE1\x78\x0F\x83\x1A"
+ "\xB1\x25\xBC\x53\xEA\x5E\xF5\x8C"
+ "\x00\x97\x2E\xC5\x39\xD0\x67\xFE"
+ "\x72\x09\xA0\x14\xAB\x42\xD9\x4D"
+ "\xE4\x7B\x12",
+ .rlen = 1011,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 1011 - 16, 16 },
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
+ "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
+ .input = "\x85\x79\x6C\x8B\x2B\x6D\x14\xF9"
+ "\xA6\x83\xB6\x80\x5B\x3A\xF3\x7E"
+ "\x30\x29\xEB\x1F\xDC\x19\x5F\xEB"
+ "\xF7\xC4\x27\x04\x51\x87\xD7\x6F"
+ "\xB8\x4E\x07\xFB\xAC\x3B\x08\xB4"
+ "\x4D\xCB\xE8\xE1\x71\x7D\x4F\x48"
+ "\xCD\x81\x64\xA5\xC4\x07\x1A\x9A"
+ "\x4B\x62\x90\x0E\xC8\xB3\x2B\x6B"
+ "\x8F\x9C\x6E\x72\x4B\xBA\xEF\x07"
+ "\x2C\x56\x07\x5E\x37\x30\x60\xA9"
+ "\xE3\xEF\xD6\x69\xE1\xA1\x77\x64"
+ "\x93\x75\x7A\xB7\x7A\x3B\xE9\x43"
+ "\x23\x35\x95\x91\x80\x8A\xC7\xCF"
+ "\xC3\xD5\xBF\xE7\xFE\x4C\x06\x6B"
+ "\x05\x19\x48\xE2\x62\xBA\x4F\xF2"
+ "\xFB\xEE\xE4\xCB\x79\x9D\xA3\x10"
+ "\x1D\x29\x8C\x1D\x7A\x88\x5A\xDD"
+ "\x4E\xB6\x18\xAA\xCD\xE6\x33\x96"
+ "\xD9\x0F\x90\x5A\x78\x76\x4D\x77"
+ "\x3C\x20\x89\x3B\xA3\xF9\x07\xFD"
+ "\xE4\xE8\x20\x2D\x15\x0A\x63\x49"
+ "\xF5\x4F\x89\xD8\xDE\xA1\x28\x78"
+ "\x28\x07\x09\x1B\x03\x94\x1D\x4B"
+ "\x82\x28\x1E\x1D\x95\xBA\xAC\x85"
+ "\x71\x6E\x3C\x18\x4B\x77\x74\x79"
+ "\xBF\x67\x0A\x53\x3C\x94\xD9\x60"
+ "\xE9\x6D\x40\x34\xA0\x2A\x53\x5D"
+ "\x27\xD5\x47\xF9\xC3\x4B\x27\x29"
+ "\xE4\x76\x9C\x3F\xA7\x1C\x87\xFC"
+ "\x6E\x0F\xCF\x9B\x60\xF0\xF0\x8B"
+ "\x70\x1C\x84\x81\x72\x4D\xB4\x98"
+ "\x23\x62\xE7\x6A\x2B\xFC\xA5\xB2"
+ "\xFF\xF5\x71\x07\xCD\x90\x23\x13"
+ "\x19\xD7\x79\x36\x6C\x9D\x55\x8B"
+ "\x93\x78\x86\x05\x69\x46\xD0\xC5"
+ "\x39\x09\xEB\x79\xEF\xFA\x9F\xAE"
+ "\xF3\xD5\x44\xC3\xFD\x86\xD2\x7C"
+ "\x83\x4B\xD8\x75\x9C\x18\x04\x7B"
+ "\x73\xAD\x72\xA4\xF6\xAB\xCF\x4B"
+ "\xCC\x01\x45\x90\xA6\x43\x05\x0C"
+ "\x6C\x4F\x62\x77\x57\x97\x9F\xEE"
+ "\x75\xA7\x3C\x38\xD1\x0F\x3D\x0E"
+ "\x2C\x43\x98\xFB\x13\x65\x73\xE4"
+ "\x3C\x1E\xD6\x90\x08\xF7\xE0\x99"
+ "\x3B\xF1\x9D\x6C\x48\xA9\x0E\x32"
+ "\x17\xC2\xCC\x20\xA1\x19\x26\xAA"
+ "\xE0\x75\x2F\xFB\x54\x66\x0A\xDF"
+ "\xB5\xF2\x1F\xC1\x34\x3C\x30\x56"
+ "\xE8\xDC\xF7\x92\x6B\xBF\x17\x24"
+ "\xEC\x94\xB5\x3B\xD6\xCE\xA2\x54"
+ "\x10\x7F\x50\xDE\x69\x77\xD5\x37"
+ "\xFE\x9C\x10\x83\xC5\xEB\xC9\x53"
+ "\xB7\xF3\xC4\x20\xAF\x0A\x7E\x57"
+ "\x3A\xE6\x75\xFE\x89\x00\x6E\x48"
+ "\xFB\x99\x17\x2C\xF6\x64\x40\x95"
+ "\x5E\xDC\x7A\xA6\x70\xC7\xF4\xDD"
+ "\x52\x05\x24\x34\xF9\x0E\xC8\x64"
+ "\x6D\xE2\xD8\x80\x53\x31\x4C\xFE"
+ "\xB4\x3A\x5F\x19\xCF\x42\x1B\x22"
+ "\x0B\x2D\x7B\xF1\xC5\x43\xF7\x5E"
+ "\x12\xA8\x01\x64\x16\x0B\x26\x5A"
+ "\x0C\x95\x0F\x40\xC5\x5A\x06\x7C"
+ "\xCF\xF5\xD5\xB7\x7A\x34\x23\xB6"
+ "\xAA\x9E\xA8\x98\xA2\xF8\x3D\xD3"
+ "\x3F\x23\x69\x63\x56\x96\x45\xD6"
+ "\x74\x23\x1D\x5C\x63\xCC\xD8\x78"
+ "\x16\xE2\x9C\xD2\x80\x02\xF2\x28"
+ "\x69\x2F\xC4\xA8\x15\x15\x24\x3B"
+ "\xCB\xF0\x14\xE4\x62\xC8\xF3\xD1"
+ "\x03\x58\x1B\x33\x77\x74\x1F\xB4"
+ "\x07\x86\xF2\x21\xB7\x41\xAE\xBF"
+ "\x25\xC2\xFF\x51\xEF\xEA\xCE\xC4"
+ "\x5F\xD9\xB8\x18\x6A\xF0\x0F\x0D"
+ "\xF8\x04\xBB\x6D\x62\x33\x87\x26"
+ "\x4F\x2F\x14\x6E\xDC\xDB\x66\x09"
+ "\x2A\xEF\x7D\x84\x10\xAC\x82\x5E"
+ "\xD2\xE4\xAD\x74\x7A\x6D\xCC\x3A"
+ "\x7B\x62\xD8\xD6\x07\x2D\xF7\xDF"
+ "\x9B\xB3\x82\xCF\x9C\x1D\x76\x5C"
+ "\xAC\x7B\xD4\x9B\x45\xA1\x64\x11"
+ "\x66\xF1\xA7\x0B\xF9\xDD\x00\xDD"
+ "\xA4\x45\x3D\x3E\x03\xC9\x2E\xCB"
+ "\xC3\x14\x84\x72\xFD\x41\xDC\xBD"
+ "\x75\xBE\xA8\xE5\x16\x48\x64\x39"
+ "\xCA\xF3\xE6\xDC\x25\x24\xF1\x6D"
+ "\xB2\x8D\xC5\x38\x54\xD3\x5D\x6D"
+ "\x0B\x29\x10\x15\x0E\x13\x3B\xAC"
+ "\x7E\xCC\x9E\x3E\x18\x48\xA6\x02"
+ "\xEF\x03\xB2\x2E\xE3\xD2\x70\x21"
+ "\xB4\x19\x26\xBE\x3A\x3D\x05\xE0"
+ "\xF8\x09\xAF\xE4\x31\x26\x92\x2F"
+ "\x8F\x55\xAC\xED\x0B\xB2\xA5\x34"
+ "\xBE\x50\xB1\x02\x22\x96\xE3\x40"
+ "\x7B\x70\x50\x6E\x3B\xD5\xE5\xA0"
+ "\x8E\xA2\xAD\x14\x60\x5C\x7A\x2B"
+ "\x3D\x1B\x7F\xC1\xC0\x2C\x56\x36"
+ "\xD2\x0A\x32\x06\x97\x34\xB9\xF4"
+ "\x6F\x9F\x7E\x80\xD0\x9D\xF7\x6A"
+ "\x21\xC1\xA2\x6A\xB1\x96\x5B\x4D"
+ "\x7A\x15\x6C\xC4\x4E\xB8\xE0\x9E"
+ "\x6C\x50\xF3\x9C\xC9\xB5\x23\xB7"
+ "\xF1\xD4\x29\x4A\x23\xC4\xAD\x1E"
+ "\x2C\x07\xD2\x43\x5F\x57\x93\xCA"
+ "\x85\xF9\x9F\xAD\x4C\xF1\xE4\xB1"
+ "\x1A\x8E\x28\xA4\xB6\x52\x77\x7E"
+ "\x68\xC6\x47\xB9\x76\xCC\x65\x5F"
+ "\x0B\xF9\x67\x93\xD8\x0E\x9A\x37"
+ "\x5F\x41\xED\x64\x6C\xAD\x5F\xED"
+ "\x3F\x8D\xFB\x8E\x1E\xA0\xE4\x1F"
+ "\xC2\xC7\xED\x18\x43\xE1\x20\x86"
+ "\x5D\xBC\x30\x70\x22\xA1\xDC\x53"
+ "\x10\x3A\x8D\x47\x82\xCD\x7F\x59"
+ "\x03\x2D\x6D\xF5\xE7\x79\xD4\x07"
+ "\x68\x2A\xA5\x42\x19\x4D\xAF\xF5"
+ "\xED\x47\x83\xBC\x5F\x62\x84\xDA"
+ "\xDA\x41\xFF\xB0\x1D\x64\xA3\xC8"
+ "\xBD\x4E\xE0\xB8\x7F\xEE\x55\x0A"
+ "\x4E\x61\xB2\x51\xF6\x9C\x95\xF6"
+ "\x92\xBB\xF6\xC5\xF0\x09\x86\xDE"
+ "\x37\x9E\x29\xF9\x2A\x18\x73\x0D"
+ "\xDC\x7E\x6B\x7B\x1B\x43\x8C\xEA"
+ "\x13\xC8\x1A\x47\x0A\x2D\x6D\x56"
+ "\xCD\xD2\xE7\x53\x1A\xAB\x1C\x3C"
+ "\xC5\x9B\x03\x70\x29\x2A\x49\x09"
+ "\x67\xA1\xEA\xD6\x3A\x5B\xBF\x71"
+ "\x1D\x48\x64\x6C\xFB\xC0\x9E\x36",
+ .ilen = 1008,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE\x65\xFC\x70\x07\x9E"
+ "\x12\xA9\x40\xD7\x4B\xE2\x79\x10"
+ "\x84\x1B\xB2\x26\xBD\x54\xEB\x5F"
+ "\xF6\x8D\x01\x98\x2F\xC6\x3A\xD1"
+ "\x68\xFF\x73\x0A\xA1\x15\xAC\x43"
+ "\xDA\x4E\xE5\x7C\x13\x87\x1E\xB5"
+ "\x29\xC0\x57\xEE\x62\xF9\x90\x04"
+ "\x9B\x32\xC9\x3D\xD4\x6B\x02\x76"
+ "\x0D\xA4\x18\xAF\x46\xDD\x51\xE8"
+ "\x7F\x16\x8A\x21\xB8\x2C\xC3\x5A"
+ "\xF1\x65\xFC\x93\x07\x9E\x35\xCC"
+ "\x40\xD7\x6E\x05\x79\x10\xA7\x1B"
+ "\xB2\x49\xE0\x54\xEB\x82\x19\x8D"
+ "\x24\xBB\x2F\xC6\x5D\xF4\x68\xFF"
+ "\x96\x0A\xA1\x38\xCF\x43\xDA\x71"
+ "\x08\x7C\x13\xAA\x1E\xB5\x4C\xE3"
+ "\x57\xEE\x85\x1C\x90\x27\xBE\x32"
+ "\xC9\x60\xF7\x6B\x02\x99\x0D\xA4"
+ "\x3B\xD2\x46\xDD\x74\x0B\x7F\x16"
+ "\xAD\x21\xB8\x4F\xE6\x5A\xF1\x88"
+ "\x1F\x93\x2A\xC1\x35\xCC\x63\xFA"
+ "\x6E\x05\x9C\x10\xA7\x3E\xD5\x49"
+ "\xE0\x77\x0E\x82\x19\xB0\x24\xBB"
+ "\x52\xE9\x5D\xF4\x8B\x22\x96\x2D"
+ "\xC4\x38\xCF\x66\xFD\x71\x08\x9F"
+ "\x13\xAA\x41\xD8\x4C\xE3\x7A\x11"
+ "\x85\x1C\xB3\x27\xBE\x55\xEC\x60"
+ "\xF7\x8E\x02\x99\x30\xC7\x3B\xD2"
+ "\x69\x00\x74\x0B\xA2\x16\xAD\x44"
+ "\xDB\x4F\xE6\x7D\x14\x88\x1F\xB6"
+ "\x2A\xC1\x58\xEF\x63\xFA\x91\x05"
+ "\x9C\x33\xCA\x3E\xD5\x6C\x03\x77"
+ "\x0E\xA5\x19\xB0\x47\xDE\x52\xE9"
+ "\x80\x17\x8B\x22\xB9\x2D\xC4\x5B"
+ "\xF2\x66\xFD\x94\x08\x9F\x36\xCD"
+ "\x41\xD8\x6F\x06\x7A\x11\xA8\x1C"
+ "\xB3\x4A\xE1\x55\xEC\x83\x1A\x8E"
+ "\x25\xBC\x30\xC7\x5E\xF5\x69\x00"
+ "\x97\x0B\xA2\x39\xD0\x44\xDB\x72"
+ "\x09\x7D\x14\xAB\x1F\xB6\x4D\xE4"
+ "\x58\xEF\x86\x1D\x91\x28\xBF\x33"
+ "\xCA\x61\xF8\x6C\x03\x9A\x0E\xA5"
+ "\x3C\xD3\x47\xDE\x75\x0C\x80\x17"
+ "\xAE\x22\xB9\x50\xE7\x5B\xF2\x89"
+ "\x20\x94\x2B\xC2\x36\xCD\x64\xFB"
+ "\x6F\x06\x9D\x11\xA8\x3F\xD6\x4A"
+ "\xE1\x78\x0F\x83\x1A\xB1\x25\xBC"
+ "\x53\xEA\x5E\xF5\x8C\x00\x97\x2E"
+ "\xC5\x39\xD0\x67\xFE\x72\x09\xA0"
+ "\x14\xAB\x42\xD9\x4D\xE4\x7B\x12"
+ "\x86\x1D\xB4\x28\xBF\x56\xED\x61"
+ "\xF8\x8F\x03\x9A\x31\xC8\x3C\xD3"
+ "\x6A\x01\x75\x0C\xA3\x17\xAE\x45"
+ "\xDC\x50\xE7\x7E\x15\x89\x20\xB7"
+ "\x2B\xC2\x59\xF0\x64\xFB\x92\x06"
+ "\x9D\x34\xCB\x3F\xD6\x6D\x04\x78"
+ "\x0F\xA6\x1A\xB1\x48\xDF\x53\xEA"
+ "\x81\x18\x8C\x23\xBA\x2E\xC5\x5C"
+ "\xF3\x67\xFE\x95\x09\xA0\x37\xCE"
+ "\x42\xD9\x70\x07\x7B\x12\xA9\x1D"
+ "\xB4\x4B\xE2\x56\xED\x84\x1B\x8F"
+ "\x26\xBD\x31\xC8\x5F\xF6\x6A\x01"
+ "\x98\x0C\xA3\x3A\xD1\x45\xDC\x73"
+ "\x0A\x7E\x15\xAC\x20\xB7\x4E\xE5"
+ "\x59\xF0\x87\x1E\x92\x29\xC0\x34"
+ "\xCB\x62\xF9\x6D\x04\x9B\x0F\xA6"
+ "\x3D\xD4\x48\xDF\x76\x0D\x81\x18"
+ "\xAF\x23\xBA\x51\xE8\x5C\xF3\x8A"
+ "\x21\x95\x2C\xC3\x37\xCE\x65\xFC"
+ "\x70\x07\x9E\x12\xA9\x40\xD7\x4B"
+ "\xE2\x79\x10\x84\x1B\xB2\x26\xBD"
+ "\x54\xEB\x5F\xF6\x8D\x01\x98\x2F"
+ "\xC6\x3A\xD1\x68\xFF\x73\x0A\xA1"
+ "\x15\xAC\x43\xDA\x4E\xE5\x7C\x13"
+ "\x87\x1E\xB5\x29\xC0\x57\xEE\x62"
+ "\xF9\x90\x04\x9B\x32\xC9\x3D\xD4"
+ "\x6B\x02\x76\x0D\xA4\x18\xAF\x46"
+ "\xDD\x51\xE8\x7F\x16\x8A\x21\xB8"
+ "\x2C\xC3\x5A\xF1\x65\xFC\x93\x07"
+ "\x9E\x35\xCC\x40\xD7\x6E\x05\x79"
+ "\x10\xA7\x1B\xB2\x49\xE0\x54\xEB"
+ "\x82\x19\x8D\x24\xBB\x2F\xC6\x5D"
+ "\xF4\x68\xFF\x96\x0A\xA1\x38\xCF"
+ "\x43\xDA\x71\x08\x7C\x13\xAA\x1E"
+ "\xB5\x4C\xE3\x57\xEE\x85\x1C\x90"
+ "\x27\xBE\x32\xC9\x60\xF7\x6B\x02"
+ "\x99\x0D\xA4\x3B\xD2\x46\xDD\x74"
+ "\x0B\x7F\x16\xAD\x21\xB8\x4F\xE6"
+ "\x5A\xF1\x88\x1F\x93\x2A\xC1\x35"
+ "\xCC\x63\xFA\x6E\x05\x9C\x10\xA7"
+ "\x3E\xD5\x49\xE0\x77\x0E\x82\x19"
+ "\xB0\x24\xBB\x52\xE9\x5D\xF4\x8B"
+ "\x22\x96\x2D\xC4\x38\xCF\x66\xFD"
+ "\x71\x08\x9F\x13\xAA\x41\xD8\x4C"
+ "\xE3\x7A\x11\x85\x1C\xB3\x27\xBE"
+ "\x55\xEC\x60\xF7\x8E\x02\x99\x30"
+ "\xC7\x3B\xD2\x69\x00\x74\x0B\xA2"
+ "\x16\xAD\x44\xDB\x4F\xE6\x7D\x14"
+ "\x88\x1F\xB6\x2A\xC1\x58\xEF\x63"
+ "\xFA\x91\x05\x9C\x33\xCA\x3E\xD5"
+ "\x6C\x03\x77\x0E\xA5\x19\xB0\x47"
+ "\xDE\x52\xE9\x80\x17\x8B\x22\xB9"
+ "\x2D\xC4\x5B\xF2\x66\xFD\x94\x08"
+ "\x9F\x36\xCD\x41\xD8\x6F\x06\x7A"
+ "\x11\xA8\x1C\xB3\x4A\xE1\x55\xEC"
+ "\x83\x1A\x8E\x25\xBC\x30\xC7\x5E"
+ "\xF5\x69\x00\x97\x0B\xA2\x39\xD0"
+ "\x44\xDB\x72\x09\x7D\x14\xAB\x1F"
+ "\xB6\x4D\xE4\x58\xEF\x86\x1D\x91"
+ "\x28\xBF\x33\xCA\x61\xF8\x6C\x03"
+ "\x9A\x0E\xA5\x3C\xD3\x47\xDE\x75"
+ "\x0C\x80\x17\xAE\x22\xB9\x50\xE7"
+ "\x5B\xF2\x89\x20\x94\x2B\xC2\x36"
+ "\xCD\x64\xFB\x6F\x06\x9D\x11\xA8"
+ "\x3F\xD6\x4A\xE1\x78\x0F\x83\x1A"
+ "\xB1\x25\xBC\x53\xEA\x5E\xF5\x8C"
+ "\x00\x97\x2E\xC5\x39\xD0\x67\xFE"
+ "\x72\x09\xA0\x14\xAB\x42\xD9\x4D",
+ .rlen = 1008,
+ },
+};
+
+static struct cipher_testvec camellia_lrw_enc_tv_template[] = {
+ /* Generated from AES-LRW test vectors */
+ {
+ .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d"
+ "\x4c\x26\x84\x14\xb5\x68\x01\x85"
+ "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03"
+ "\xee\x5a\x83\x0c\xcc\x09\x4c\x87",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x92\x68\x19\xd7\xb7\x5b\x0a\x31"
+ "\x97\xcc\x72\xbe\x99\x17\xeb\x3e",
+ .rlen = 16,
+ }, {
+ .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c"
+ "\xd7\x79\xe8\x0f\x54\x88\x79\x44"
+ "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea"
+ "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x02",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x73\x09\xb7\x50\xb6\x77\x30\x50"
+ "\x5c\x8a\x9c\x26\x77\x9d\xfc\x4a",
+ .rlen = 16,
+ }, {
+ .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50"
+ "\x30\xfe\x69\xe2\x37\x7f\x98\x47"
+ "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6"
+ "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x90\xae\x83\xe0\x22\xb9\x60\x91"
+ "\xfa\xa9\xb7\x98\xe3\xed\x87\x01",
+ .rlen = 16,
+ }, {
+ .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15"
+ "\x25\x83\xf7\x3c\x1f\x01\x28\x74"
+ "\xca\xc6\xbc\x35\x4d\x4a\x65\x54"
+ "\x90\xae\x61\xcf\x7b\xae\xbd\xcc"
+ "\xad\xe4\x94\xc5\x4a\x29\xae\x70",
+ .klen = 40,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x99\xe9\x6e\xd4\xc9\x21\xa5\xf0"
+ "\xd8\x83\xef\xd9\x07\x16\x5f\x35",
+ .rlen = 16,
+ }, {
+ .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff"
+ "\xf8\x86\xce\xac\x93\xc5\xad\xc6"
+ "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd"
+ "\x52\x13\xb2\xb7\xf0\xff\x11\xd8"
+ "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f",
+ .klen = 40,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x42\x88\xf4\xcb\x21\x11\x6d\x8e"
+ "\xde\x1a\xf2\x29\xf1\x4a\xe0\x15",
+ .rlen = 16,
+ }, {
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x40\xaa\x34\x86\x4a\x8f\x78\xb9"
+ "\xdb\xdb\x0f\x3d\x48\x70\xbe\x8d",
+ .rlen = 16,
+ }, {
+ .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d"
+ "\xd4\x70\x98\x0b\xc7\x95\x84\xc8"
+ "\xb2\xfb\x64\xce\x60\x97\x87\x8d"
+ "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7"
+ "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4"
+ "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .ilen = 16,
+ .result = "\x04\xab\x28\x37\x31\x7a\x26\xab"
+ "\xa1\x70\x1b\x9c\xe7\xdd\x83\xff",
+ .rlen = 16,
+ }, {
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x05\x11\xb7\x18\xab\xc6\x2d\xac"
+ "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c"
+ "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8"
+ "\x50\x38\x1f\x71\x49\xb6\x57\xd6"
+ "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90"
+ "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6"
+ "\xad\x1e\x9e\x20\x5f\x38\xbe\x04"
+ "\xda\x10\x8e\xed\xa2\xa4\x87\xab"
+ "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c"
+ "\xc9\xac\x42\x31\x95\x7c\xc9\x04"
+ "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6"
+ "\x15\xd7\x3f\x4f\x2f\x66\x69\x03"
+ "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65"
+ "\x4c\x96\x12\xed\x7c\x92\x03\x01"
+ "\x6f\xbc\x35\x93\xac\xf1\x27\xf1"
+ "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50"
+ "\x89\xa4\x8e\x66\x44\x85\xcc\xfd"
+ "\x33\x14\x70\xe3\x96\xb2\xc3\xd3"
+ "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5"
+ "\x2d\x64\x75\xdd\xb4\x54\xe6\x74"
+ "\x8c\xd3\x9d\x9e\x86\xab\x51\x53"
+ "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40"
+ "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5"
+ "\x76\x12\x73\x44\x1a\x56\xd7\x72"
+ "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda"
+ "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd"
+ "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60"
+ "\x1a\xe2\x70\x85\x58\xc2\x1b\x09"
+ "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9"
+ "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8"
+ "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8"
+ "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10"
+ "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1"
+ "\x90\x3e\x76\x4a\x74\xa4\x21\x2c"
+ "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e"
+ "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f"
+ "\x8d\x23\x31\x74\x84\xeb\x88\x6e"
+ "\xcc\xb9\xbc\x22\x83\x19\x07\x22"
+ "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78"
+ "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5"
+ "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41"
+ "\x3c\xce\x8f\x42\x60\x71\xa7\x75"
+ "\x08\x40\x65\x8a\x82\xbf\xf5\x43"
+ "\x71\x96\xa9\x4d\x44\x8a\x20\xbe"
+ "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65"
+ "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9"
+ "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4"
+ "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a"
+ "\x62\x73\x65\xfd\x46\x63\x25\x3d"
+ "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf"
+ "\x24\xf3\xb4\xac\x64\xba\xdf\x4b"
+ "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7"
+ "\xc5\x68\x77\x84\x32\x2b\xcc\x85"
+ "\x74\x96\xf0\x12\x77\x61\xb9\xeb"
+ "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8"
+ "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24"
+ "\xda\x39\x87\x45\xc0\x2b\xbb\x01"
+ "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce"
+ "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6"
+ "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32"
+ "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45"
+ "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6"
+ "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4"
+ "\x21\xc4\xc2\x75\x67\x89\x37\x0a",
+ .ilen = 512,
+ .result = "\x90\x69\x8e\xf2\x14\x86\x59\xf9"
+ "\xec\xe7\xfa\x3f\x48\x9d\x7f\x96"
+ "\x67\x76\xac\x2c\xd2\x63\x18\x93"
+ "\x13\xf8\xf1\xf6\x71\x77\xb3\xee"
+ "\x93\xb2\xcc\xf3\x26\xc1\x16\x4f"
+ "\xd4\xe8\x43\xc1\x68\xa3\x3e\x06"
+ "\x38\x51\xff\xa8\xb9\xa4\xeb\xb1"
+ "\x62\xdd\x78\x81\xea\x1d\xef\x04"
+ "\x1d\x07\xc1\x67\xc8\xd6\x77\xa1"
+ "\x84\x95\xf4\x9a\xd9\xbc\x2d\xe2"
+ "\xf6\x80\xfc\x91\x2a\xbc\x42\xa0"
+ "\x40\x41\x69\xaa\x71\xc0\x37\xec"
+ "\x39\xf3\xf2\xec\x82\xc3\x88\x79"
+ "\xbc\xc3\xaa\xb7\xcf\x6a\x72\x80"
+ "\x4c\xf4\x84\x8f\x13\x9e\x94\x5c"
+ "\xe5\xb2\x91\xbb\x92\x51\x4d\xf1"
+ "\xd6\x0d\x71\x6b\x7a\xc2\x2f\x12"
+ "\x6f\x75\xc7\x80\x99\x50\x84\xcf"
+ "\xa8\xeb\xd6\xe1\x1c\x59\x81\x7e"
+ "\xb9\xb3\xde\x7a\x93\x14\x12\xa2"
+ "\xf7\x43\xb3\x9d\x1a\x87\x65\x91"
+ "\x42\x08\x40\x82\x06\x1c\x2d\x55"
+ "\x6e\x48\xd5\x74\x07\x6e\x9d\x80"
+ "\xeb\xb4\x97\xa1\x36\xdf\xfa\x74"
+ "\x79\x7f\x5a\x75\xe7\x71\xc8\x8c"
+ "\x7e\xf8\x3a\x77\xcd\x32\x05\xf9"
+ "\x3d\xd4\xe9\xa2\xbb\xc4\x8b\x83"
+ "\x42\x5c\x82\xfa\xe9\x4b\x96\x3b"
+ "\x7f\x89\x8b\xf9\xf1\x87\xda\xf0"
+ "\x87\xef\x13\x5d\xf0\xe2\xc5\xc1"
+ "\xed\x14\xa9\x57\x19\x63\x40\x04"
+ "\x24\xeb\x6e\x19\xd1\x3d\x70\x78"
+ "\xeb\xda\x55\x70\x2c\x4f\x41\x5b"
+ "\x56\x9f\x1a\xd3\xac\xf1\xc0\xc3"
+ "\x21\xec\xd7\xd2\x55\x32\x7c\x2e"
+ "\x3c\x48\x8e\xb4\x85\x35\x47\xfe"
+ "\xe2\x88\x79\x98\x6a\xc9\x8d\xff"
+ "\xe9\x89\x6e\xb8\xe2\x97\x00\xbd"
+ "\xa4\x8f\xba\xd0\x8c\xcb\x79\x99"
+ "\xb3\xb2\xb2\x7a\xc3\xb7\xef\x75"
+ "\x23\x52\x76\xc3\x50\x6e\x66\xf8"
+ "\xa2\xe2\xce\xba\x40\x21\x3f\xc9"
+ "\x0a\x32\x7f\xf7\x08\x8c\x66\xcf"
+ "\xd3\xdf\x57\x59\x83\xb8\xe1\x85"
+ "\xd6\x8f\xfb\x48\x1f\x3a\xc4\x2f"
+ "\xb4\x2d\x58\xab\xd8\x7f\x5e\x3a"
+ "\xbc\x62\x3e\xe2\x6a\x52\x0d\x76"
+ "\x2f\x1c\x1a\x30\xed\x95\x2a\x44"
+ "\x35\xa5\x83\x04\x84\x01\x99\x56"
+ "\xb7\xe3\x10\x96\xfa\xdc\x19\xdd"
+ "\xe2\x7f\xcb\xa0\x49\x1b\xff\x4c"
+ "\x73\xf6\xbb\x94\x00\xe8\xa9\x3d"
+ "\xe2\x20\xe9\x3f\xfa\x07\x5d\x77"
+ "\x06\xd5\x4f\x4d\x02\xb8\x40\x1b"
+ "\x30\xed\x1a\x50\x19\xef\xc4\x2c"
+ "\x02\xd9\xc5\xd3\x11\x33\x37\xe5"
+ "\x2b\xa3\x95\xa6\xee\xd8\x74\x1d"
+ "\x68\xa0\xeb\xbf\xdd\x5e\x99\x96"
+ "\x91\xc3\x94\x24\xa5\x12\xa2\x37"
+ "\xb3\xac\xcf\x2a\xfd\x55\x34\xfe"
+ "\x79\x92\x3e\xe6\x1b\x49\x57\x5d"
+ "\x93\x6c\x01\xf7\xcc\x4e\x20\xd1"
+ "\xb2\x1a\xd8\x4c\xbd\x1d\x10\xe9"
+ "\x5a\xa8\x92\x7f\xba\xe6\x0c\x95",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec camellia_lrw_dec_tv_template[] = {
+ /* Generated from AES-LRW test vectors */
+ /* same as enc vectors with input and result reversed */
+ {
+ .key = "\x45\x62\xac\x25\xf8\x28\x17\x6d"
+ "\x4c\x26\x84\x14\xb5\x68\x01\x85"
+ "\x25\x8e\x2a\x05\xe7\x3e\x9d\x03"
+ "\xee\x5a\x83\x0c\xcc\x09\x4c\x87",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x92\x68\x19\xd7\xb7\x5b\x0a\x31"
+ "\x97\xcc\x72\xbe\x99\x17\xeb\x3e",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\x59\x70\x47\x14\xf5\x57\x47\x8c"
+ "\xd7\x79\xe8\x0f\x54\x88\x79\x44"
+ "\x0d\x48\xf0\xb7\xb1\x5a\x53\xea"
+ "\x1c\xaa\x6b\x29\xc2\xca\xfb\xaf",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x02",
+ .input = "\x73\x09\xb7\x50\xb6\x77\x30\x50"
+ "\x5c\x8a\x9c\x26\x77\x9d\xfc\x4a",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\xd8\x2a\x91\x34\xb2\x6a\x56\x50"
+ "\x30\xfe\x69\xe2\x37\x7f\x98\x47"
+ "\xcd\xf9\x0b\x16\x0c\x64\x8f\xb6"
+ "\xb0\x0d\x0d\x1b\xae\x85\x87\x1f",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x90\xae\x83\xe0\x22\xb9\x60\x91"
+ "\xfa\xa9\xb7\x98\xe3\xed\x87\x01",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\x0f\x6a\xef\xf8\xd3\xd2\xbb\x15"
+ "\x25\x83\xf7\x3c\x1f\x01\x28\x74"
+ "\xca\xc6\xbc\x35\x4d\x4a\x65\x54"
+ "\x90\xae\x61\xcf\x7b\xae\xbd\xcc"
+ "\xad\xe4\x94\xc5\x4a\x29\xae\x70",
+ .klen = 40,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x99\xe9\x6e\xd4\xc9\x21\xa5\xf0"
+ "\xd8\x83\xef\xd9\x07\x16\x5f\x35",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\x8a\xd4\xee\x10\x2f\xbd\x81\xff"
+ "\xf8\x86\xce\xac\x93\xc5\xad\xc6"
+ "\xa0\x19\x07\xc0\x9d\xf7\xbb\xdd"
+ "\x52\x13\xb2\xb7\xf0\xff\x11\xd8"
+ "\xd6\x08\xd0\xcd\x2e\xb1\x17\x6f",
+ .klen = 40,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x42\x88\xf4\xcb\x21\x11\x6d\x8e"
+ "\xde\x1a\xf2\x29\xf1\x4a\xe0\x15",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x40\xaa\x34\x86\x4a\x8f\x78\xb9"
+ "\xdb\xdb\x0f\x3d\x48\x70\xbe\x8d",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\xfb\x76\x15\xb2\x3d\x80\x89\x1d"
+ "\xd4\x70\x98\x0b\xc7\x95\x84\xc8"
+ "\xb2\xfb\x64\xce\x60\x97\x87\x8d"
+ "\x17\xfc\xe4\x5a\x49\xe8\x30\xb7"
+ "\x6e\x78\x17\xe7\x2d\x5e\x12\xd4"
+ "\x60\x64\x04\x7a\xf1\x2f\x9e\x0c",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x02\x00\x00\x00\x00",
+ .input = "\x04\xab\x28\x37\x31\x7a\x26\xab"
+ "\xa1\x70\x1b\x9c\xe7\xdd\x83\xff",
+ .ilen = 16,
+ .result = "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x41\x42\x43\x44\x45\x46",
+ .rlen = 16,
+ }, {
+ .key = "\xf8\xd4\x76\xff\xd6\x46\xee\x6c"
+ "\x23\x84\xcb\x1c\x77\xd6\x19\x5d"
+ "\xfe\xf1\xa9\xf3\x7b\xbc\x8d\x21"
+ "\xa7\x9c\x21\xf8\xcb\x90\x02\x89"
+ "\xa8\x45\x34\x8e\xc8\xc5\xb5\xf1"
+ "\x26\xf5\x0e\x76\xfe\xfd\x1b\x1e",
+ .klen = 48,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x01",
+ .input = "\x90\x69\x8e\xf2\x14\x86\x59\xf9"
+ "\xec\xe7\xfa\x3f\x48\x9d\x7f\x96"
+ "\x67\x76\xac\x2c\xd2\x63\x18\x93"
+ "\x13\xf8\xf1\xf6\x71\x77\xb3\xee"
+ "\x93\xb2\xcc\xf3\x26\xc1\x16\x4f"
+ "\xd4\xe8\x43\xc1\x68\xa3\x3e\x06"
+ "\x38\x51\xff\xa8\xb9\xa4\xeb\xb1"
+ "\x62\xdd\x78\x81\xea\x1d\xef\x04"
+ "\x1d\x07\xc1\x67\xc8\xd6\x77\xa1"
+ "\x84\x95\xf4\x9a\xd9\xbc\x2d\xe2"
+ "\xf6\x80\xfc\x91\x2a\xbc\x42\xa0"
+ "\x40\x41\x69\xaa\x71\xc0\x37\xec"
+ "\x39\xf3\xf2\xec\x82\xc3\x88\x79"
+ "\xbc\xc3\xaa\xb7\xcf\x6a\x72\x80"
+ "\x4c\xf4\x84\x8f\x13\x9e\x94\x5c"
+ "\xe5\xb2\x91\xbb\x92\x51\x4d\xf1"
+ "\xd6\x0d\x71\x6b\x7a\xc2\x2f\x12"
+ "\x6f\x75\xc7\x80\x99\x50\x84\xcf"
+ "\xa8\xeb\xd6\xe1\x1c\x59\x81\x7e"
+ "\xb9\xb3\xde\x7a\x93\x14\x12\xa2"
+ "\xf7\x43\xb3\x9d\x1a\x87\x65\x91"
+ "\x42\x08\x40\x82\x06\x1c\x2d\x55"
+ "\x6e\x48\xd5\x74\x07\x6e\x9d\x80"
+ "\xeb\xb4\x97\xa1\x36\xdf\xfa\x74"
+ "\x79\x7f\x5a\x75\xe7\x71\xc8\x8c"
+ "\x7e\xf8\x3a\x77\xcd\x32\x05\xf9"
+ "\x3d\xd4\xe9\xa2\xbb\xc4\x8b\x83"
+ "\x42\x5c\x82\xfa\xe9\x4b\x96\x3b"
+ "\x7f\x89\x8b\xf9\xf1\x87\xda\xf0"
+ "\x87\xef\x13\x5d\xf0\xe2\xc5\xc1"
+ "\xed\x14\xa9\x57\x19\x63\x40\x04"
+ "\x24\xeb\x6e\x19\xd1\x3d\x70\x78"
+ "\xeb\xda\x55\x70\x2c\x4f\x41\x5b"
+ "\x56\x9f\x1a\xd3\xac\xf1\xc0\xc3"
+ "\x21\xec\xd7\xd2\x55\x32\x7c\x2e"
+ "\x3c\x48\x8e\xb4\x85\x35\x47\xfe"
+ "\xe2\x88\x79\x98\x6a\xc9\x8d\xff"
+ "\xe9\x89\x6e\xb8\xe2\x97\x00\xbd"
+ "\xa4\x8f\xba\xd0\x8c\xcb\x79\x99"
+ "\xb3\xb2\xb2\x7a\xc3\xb7\xef\x75"
+ "\x23\x52\x76\xc3\x50\x6e\x66\xf8"
+ "\xa2\xe2\xce\xba\x40\x21\x3f\xc9"
+ "\x0a\x32\x7f\xf7\x08\x8c\x66\xcf"
+ "\xd3\xdf\x57\x59\x83\xb8\xe1\x85"
+ "\xd6\x8f\xfb\x48\x1f\x3a\xc4\x2f"
+ "\xb4\x2d\x58\xab\xd8\x7f\x5e\x3a"
+ "\xbc\x62\x3e\xe2\x6a\x52\x0d\x76"
+ "\x2f\x1c\x1a\x30\xed\x95\x2a\x44"
+ "\x35\xa5\x83\x04\x84\x01\x99\x56"
+ "\xb7\xe3\x10\x96\xfa\xdc\x19\xdd"
+ "\xe2\x7f\xcb\xa0\x49\x1b\xff\x4c"
+ "\x73\xf6\xbb\x94\x00\xe8\xa9\x3d"
+ "\xe2\x20\xe9\x3f\xfa\x07\x5d\x77"
+ "\x06\xd5\x4f\x4d\x02\xb8\x40\x1b"
+ "\x30\xed\x1a\x50\x19\xef\xc4\x2c"
+ "\x02\xd9\xc5\xd3\x11\x33\x37\xe5"
+ "\x2b\xa3\x95\xa6\xee\xd8\x74\x1d"
+ "\x68\xa0\xeb\xbf\xdd\x5e\x99\x96"
+ "\x91\xc3\x94\x24\xa5\x12\xa2\x37"
+ "\xb3\xac\xcf\x2a\xfd\x55\x34\xfe"
+ "\x79\x92\x3e\xe6\x1b\x49\x57\x5d"
+ "\x93\x6c\x01\xf7\xcc\x4e\x20\xd1"
+ "\xb2\x1a\xd8\x4c\xbd\x1d\x10\xe9"
+ "\x5a\xa8\x92\x7f\xba\xe6\x0c\x95",
+ .ilen = 512,
+ .result = "\x05\x11\xb7\x18\xab\xc6\x2d\xac"
+ "\x70\x5d\xf6\x22\x94\xcd\xe5\x6c"
+ "\x17\x6b\xf6\x1c\xf0\xf3\x6e\xf8"
+ "\x50\x38\x1f\x71\x49\xb6\x57\xd6"
+ "\x8f\xcb\x8d\x6b\xe3\xa6\x29\x90"
+ "\xfe\x2a\x62\x82\xae\x6d\x8b\xf6"
+ "\xad\x1e\x9e\x20\x5f\x38\xbe\x04"
+ "\xda\x10\x8e\xed\xa2\xa4\x87\xab"
+ "\xda\x6b\xb4\x0c\x75\xba\xd3\x7c"
+ "\xc9\xac\x42\x31\x95\x7c\xc9\x04"
+ "\xeb\xd5\x6e\x32\x69\x8a\xdb\xa6"
+ "\x15\xd7\x3f\x4f\x2f\x66\x69\x03"
+ "\x9c\x1f\x54\x0f\xde\x1f\xf3\x65"
+ "\x4c\x96\x12\xed\x7c\x92\x03\x01"
+ "\x6f\xbc\x35\x93\xac\xf1\x27\xf1"
+ "\xb4\x96\x82\x5a\x5f\xb0\xa0\x50"
+ "\x89\xa4\x8e\x66\x44\x85\xcc\xfd"
+ "\x33\x14\x70\xe3\x96\xb2\xc3\xd3"
+ "\xbb\x54\x5a\x1a\xf9\x74\xa2\xc5"
+ "\x2d\x64\x75\xdd\xb4\x54\xe6\x74"
+ "\x8c\xd3\x9d\x9e\x86\xab\x51\x53"
+ "\xb7\x93\x3e\x6f\xd0\x4e\x2c\x40"
+ "\xf6\xa8\x2e\x3e\x9d\xf4\x66\xa5"
+ "\x76\x12\x73\x44\x1a\x56\xd7\x72"
+ "\x88\xcd\x21\x8c\x4c\x0f\xfe\xda"
+ "\x95\xe0\x3a\xa6\xa5\x84\x46\xcd"
+ "\xd5\x3e\x9d\x3a\xe2\x67\xe6\x60"
+ "\x1a\xe2\x70\x85\x58\xc2\x1b\x09"
+ "\xe1\xd7\x2c\xca\xad\xa8\x8f\xf9"
+ "\xac\xb3\x0e\xdb\xca\x2e\xe2\xb8"
+ "\x51\x71\xd9\x3c\x6c\xf1\x56\xf8"
+ "\xea\x9c\xf1\xfb\x0c\xe6\xb7\x10"
+ "\x1c\xf8\xa9\x7c\xe8\x53\x35\xc1"
+ "\x90\x3e\x76\x4a\x74\xa4\x21\x2c"
+ "\xf6\x2c\x4e\x0f\x94\x3a\x88\x2e"
+ "\x41\x09\x6a\x33\x7d\xf6\xdd\x3f"
+ "\x8d\x23\x31\x74\x84\xeb\x88\x6e"
+ "\xcc\xb9\xbc\x22\x83\x19\x07\x22"
+ "\xa5\x2d\xdf\xa5\xf3\x80\x85\x78"
+ "\x84\x39\x6a\x6d\x6a\x99\x4f\xa5"
+ "\x15\xfe\x46\xb0\xe4\x6c\xa5\x41"
+ "\x3c\xce\x8f\x42\x60\x71\xa7\x75"
+ "\x08\x40\x65\x8a\x82\xbf\xf5\x43"
+ "\x71\x96\xa9\x4d\x44\x8a\x20\xbe"
+ "\xfa\x4d\xbb\xc0\x7d\x31\x96\x65"
+ "\xe7\x75\xe5\x3e\xfd\x92\x3b\xc9"
+ "\x55\xbb\x16\x7e\xf7\xc2\x8c\xa4"
+ "\x40\x1d\xe5\xef\x0e\xdf\xe4\x9a"
+ "\x62\x73\x65\xfd\x46\x63\x25\x3d"
+ "\x2b\xaf\xe5\x64\xfe\xa5\x5c\xcf"
+ "\x24\xf3\xb4\xac\x64\xba\xdf\x4b"
+ "\xc6\x96\x7d\x81\x2d\x8d\x97\xf7"
+ "\xc5\x68\x77\x84\x32\x2b\xcc\x85"
+ "\x74\x96\xf0\x12\x77\x61\xb9\xeb"
+ "\x71\xaa\x82\xcb\x1c\xdb\x89\xc8"
+ "\xc6\xb5\xe3\x5c\x7d\x39\x07\x24"
+ "\xda\x39\x87\x45\xc0\x2b\xbb\x01"
+ "\xac\xbc\x2a\x5c\x7f\xfc\xe8\xce"
+ "\x6d\x9c\x6f\xed\xd3\xc1\xa1\xd6"
+ "\xc5\x55\xa9\x66\x2f\xe1\xc8\x32"
+ "\xa6\x5d\xa4\x3a\x98\x73\xe8\x45"
+ "\xa4\xc7\xa8\xb4\xf6\x13\x03\xf6"
+ "\xe9\x2e\xc4\x29\x0f\x84\xdb\xc4"
+ "\x21\xc4\xc2\x75\x67\x89\x37\x0a",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec camellia_xts_enc_tv_template[] = {
+ /* Generated from AES-XTS test vectors */
+ {
+ .key = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .ilen = 32,
+ .result = "\x06\xcb\xa5\xf1\x04\x63\xb2\x41"
+ "\xdc\xca\xfa\x09\xba\x74\xb9\x05"
+ "\x78\xba\xa4\xf8\x67\x4d\x7e\xad"
+ "\x20\x18\xf5\x0c\x41\x16\x2a\x61",
+ .rlen = 32,
+ }, {
+ .key = "\x11\x11\x11\x11\x11\x11\x11\x11"
+ "\x11\x11\x11\x11\x11\x11\x11\x11"
+ "\x22\x22\x22\x22\x22\x22\x22\x22"
+ "\x22\x22\x22\x22\x22\x22\x22\x22",
+ .klen = 32,
+ .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44",
+ .ilen = 32,
+ .result = "\xc2\xb9\xdc\x44\x1d\xdf\xf2\x86"
+ "\x8d\x35\x42\x0a\xa5\x5e\x3d\x4f"
+ "\xb5\x37\x06\xff\xbd\xd4\x91\x70"
+ "\x80\x1f\xb2\x39\x10\x89\x44\xf5",
+ .rlen = 32,
+ }, {
+ .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8"
+ "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0"
+ "\x22\x22\x22\x22\x22\x22\x22\x22"
+ "\x22\x22\x22\x22\x22\x22\x22\x22",
+ .klen = 32,
+ .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44",
+ .ilen = 32,
+ .result = "\x52\x1f\x9d\xf5\x5a\x58\x5a\x7e"
+ "\x9f\xd0\x8e\x02\x9c\x9a\x6a\xa7"
+ "\xb4\x3b\xce\xe7\x17\xaa\x89\x6a"
+ "\x35\x3c\x6b\xb5\x61\x1c\x79\x38",
+ .rlen = 32,
+ }, {
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .ilen = 512,
+ .result = "\xc7\xf9\x0a\xaa\xcb\xb5\x8f\x33"
+ "\x60\xc3\xe9\x47\x90\xb7\x50\x57"
+ "\xa3\xad\x81\x2f\xf5\x22\x96\x02"
+ "\xaa\x7f\xea\xac\x29\x78\xca\x2a"
+ "\x7c\xcd\x31\x1a\x3c\x40\x0a\x73"
+ "\x09\x66\xad\x72\x0e\x4d\x5d\x77"
+ "\xbc\xb8\x76\x80\x37\x59\xa9\x01"
+ "\x9e\xfb\xdb\x6c\x93\xef\xb6\x8d"
+ "\x1e\xc1\x94\xa8\xd4\xb5\xb0\x01"
+ "\xd5\x01\x97\x28\xcd\x7a\x1f\xe8"
+ "\x08\xda\x76\x00\x65\xcf\x7b\x31"
+ "\xc6\xfa\xf2\x3b\x00\xa7\x6a\x9e"
+ "\x6c\x43\x80\x87\xe0\xbb\x4e\xe5"
+ "\xdc\x8a\xdf\xc3\x1d\x1b\x41\x04"
+ "\xfb\x54\xdd\x29\x27\xc2\x65\x17"
+ "\x36\x88\xb0\x85\x8d\x73\x7e\x4b"
+ "\x1d\x16\x8a\x52\xbc\xa6\xbc\xa4"
+ "\x8c\xd1\x04\x16\xbf\x8c\x01\x0f"
+ "\x7e\x6b\x59\x15\x29\xd1\x9b\xd3"
+ "\x6c\xee\xac\xdc\x45\x58\xca\x5b"
+ "\x70\x0e\x6a\x12\x86\x82\x79\x9f"
+ "\x16\xd4\x9d\x67\xcd\x70\x65\x26"
+ "\x21\x72\x1e\xa1\x94\x8a\x83\x0c"
+ "\x92\x42\x58\x5e\xa2\xc5\x31\xf3"
+ "\x7b\xd1\x31\xd4\x15\x80\x31\x61"
+ "\x5c\x53\x10\xdd\xea\xc8\x83\x5c"
+ "\x7d\xa7\x05\x66\xcc\x1e\xbb\x05"
+ "\x47\xae\xb4\x0f\x84\xd8\xf6\xb5"
+ "\xa1\xc6\x52\x00\x52\xe8\xdc\xd9"
+ "\x16\x31\xb2\x47\x91\x67\xaa\x28"
+ "\x2c\x29\x85\xa3\xf7\xf2\x24\x93"
+ "\x23\x80\x1f\xa8\x1b\x82\x8d\xdc"
+ "\x9f\x0b\xcd\xb4\x3c\x20\xbc\xec"
+ "\x4f\xc7\xee\xf8\xfd\xd9\xfb\x7e"
+ "\x3f\x0d\x23\xfa\x3f\xa7\xcc\x66"
+ "\x1c\xfe\xa6\x86\xf6\xf7\x85\xc7"
+ "\x43\xc1\xd4\xfc\xe4\x79\xc9\x1d"
+ "\xf8\x89\xcd\x20\x27\x84\x5d\x5c"
+ "\x8e\x4f\x1f\xeb\x08\x21\x4f\xa3"
+ "\xe0\x7e\x0b\x9c\xe7\x42\xcf\xb7"
+ "\x3f\x43\xcc\x86\x71\x34\x6a\xd9"
+ "\x5e\xec\x8f\x36\xc9\x0a\x03\xfe"
+ "\x18\x41\xdc\x9e\x2e\x75\x20\x3e"
+ "\xcc\x77\xe0\x8f\xe8\x43\x37\x4c"
+ "\xed\x1a\x5a\xb3\xfa\x43\xc9\x71"
+ "\x9f\xc5\xce\xcf\xff\xe7\x77\x1e"
+ "\x35\x93\xde\x6b\xc0\x6a\x7e\xa9"
+ "\x34\xb8\x27\x74\x08\xda\xf2\x4a"
+ "\x23\x5b\x9f\x55\x3a\x57\x82\x52"
+ "\xea\x6d\xc3\xc7\xf2\xc8\xb5\xdc"
+ "\xc5\xb9\xbb\xaa\xf2\x29\x9f\x49"
+ "\x7a\xef\xfe\xdc\x9f\xc9\x28\xe2"
+ "\x96\x0b\x35\x84\x05\x0d\xd6\x2a"
+ "\xea\x5a\xbf\x69\xde\xee\x4f\x8f"
+ "\x84\xb9\xcf\xa7\x57\xea\xe0\xe8"
+ "\x96\xef\x0f\x0e\xec\xc7\xa6\x74"
+ "\xb1\xfe\x7a\x6d\x11\xdd\x0e\x15"
+ "\x4a\x1e\x73\x7f\x55\xea\xf6\xe1"
+ "\x5b\xb6\x71\xda\xb0\x0c\xba\x26"
+ "\x5c\x48\x38\x6d\x1c\x32\xb2\x7d"
+ "\x05\x87\xc2\x1e\x7e\x2d\xd4\x33"
+ "\xcc\x06\xdb\xe7\x82\x29\x63\xd1"
+ "\x52\x84\x4f\xee\x27\xe8\x02\xd4"
+ "\x34\x3c\x69\xc2\xbd\x20\xe6\x7a",
+ .rlen = 512,
+ }, {
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x62\x49\x77\x57\x24\x70\x93\x69"
+ "\x99\x59\x57\x49\x66\x96\x76\x27"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95"
+ "\x02\x88\x41\x97\x16\x93\x99\x37"
+ "\x51\x05\x82\x09\x74\x94\x45\x92",
+ .klen = 64,
+ .iv = "\xff\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .ilen = 512,
+ .result = "\x49\xcd\xb8\xbf\x2f\x73\x37\x28"
+ "\x9a\x7f\x6e\x57\x55\xb8\x07\x88"
+ "\x4a\x0d\x8b\x55\x60\xed\xb6\x7b"
+ "\xf1\x74\xac\x96\x05\x7b\x32\xca"
+ "\xd1\x4e\xf1\x58\x29\x16\x24\x6c"
+ "\xf2\xb3\xe4\x88\x84\xac\x4d\xee"
+ "\x97\x07\x82\xf0\x07\x12\x38\x0a"
+ "\x67\x62\xaf\xfd\x85\x9f\x0a\x55"
+ "\xa5\x20\xc5\x60\xe4\x68\x53\xa4"
+ "\x0e\x2e\x65\xe3\xe4\x0c\x30\x7c"
+ "\x1c\x01\x4f\x55\xa9\x13\xeb\x25"
+ "\x21\x87\xbc\xd3\xe7\x67\x4f\x38"
+ "\xa8\x14\x25\x71\xe9\x2e\x4c\x21"
+ "\x41\x82\x0c\x45\x39\x35\xa8\x75"
+ "\x03\x29\x01\x84\x8c\xab\x48\xbe"
+ "\x11\x56\x22\x67\xb7\x67\x1a\x09"
+ "\xa1\x72\x25\x41\x3c\x39\x65\x80"
+ "\x7d\x2f\xf8\x2c\x73\x04\x58\x9d"
+ "\xdd\x16\x8b\x63\x70\x4e\xc5\x17"
+ "\x21\xe0\x84\x51\x4b\x6f\x05\x52"
+ "\xe3\x63\x34\xfa\xa4\xaf\x33\x20"
+ "\xc1\xae\x32\xc4\xb8\x2b\xdb\x76"
+ "\xd9\x02\x31\x2f\xa3\xc6\xd0\x7b"
+ "\xaf\x1b\x84\xe3\x9b\xbf\xa6\xe0"
+ "\xb8\x8a\x13\x88\x71\xf4\x11\xa5"
+ "\xe9\xa9\x10\x33\xe0\xbe\x49\x89"
+ "\x41\x22\xf5\x9d\x80\x3e\x3b\x76"
+ "\x01\x16\x50\x6e\x7c\x6a\x81\xe9"
+ "\x13\x2c\xde\xb2\x5f\x79\xba\xb2"
+ "\xb1\x75\xae\xd2\x07\x98\x4b\x69"
+ "\xae\x7d\x5b\x90\xc2\x6c\xe6\x98"
+ "\xd3\x4c\xa1\xa3\x9c\xc9\x33\x6a"
+ "\x0d\x23\xb1\x79\x25\x13\x4b\xe5"
+ "\xaf\x93\x20\x5c\x7f\x06\x7a\x34"
+ "\x0b\x78\xe3\x67\x26\xe0\xad\x95"
+ "\xc5\x4e\x26\x22\xcf\x73\x77\x62"
+ "\x3e\x10\xd7\x90\x4b\x52\x1c\xc9"
+ "\xef\x38\x52\x18\x0e\x29\x7e\xef"
+ "\x34\xfe\x31\x95\xc5\xbc\xa8\xe2"
+ "\xa8\x4e\x9f\xea\xa6\xf0\xfe\x5d"
+ "\xc5\x39\x86\xed\x2f\x6d\xa0\xfe"
+ "\x96\xcd\x41\x10\x78\x4e\x0c\xc9"
+ "\xc3\x6d\x0f\xb7\xe8\xe0\x62\xab"
+ "\x8b\xf1\x21\x89\xa1\x12\xaa\xfa"
+ "\x9d\x70\xbe\x4c\xa8\x98\x89\x01"
+ "\xb9\xe2\x61\xde\x0c\x4a\x0b\xaa"
+ "\x89\xf5\x14\x79\x18\x8f\x3b\x0d"
+ "\x21\x17\xf8\x59\x15\x24\x64\x22"
+ "\x57\x48\x80\xd5\x3d\x92\x30\x07"
+ "\xd9\xa1\x4a\x23\x16\x43\x48\x0e"
+ "\x2b\x2d\x1b\x87\xef\x7e\xbd\xfa"
+ "\x49\xbc\x7e\x68\x6e\xa8\x46\x95"
+ "\xad\x5e\xfe\x0a\xa8\xd3\x1a\x5d"
+ "\x6b\x84\xf3\x00\xba\x52\x05\x02"
+ "\xe3\x96\x4e\xb6\x79\x3f\x43\xd3"
+ "\x4d\x3f\xd6\xab\x0a\xc4\x75\x2d"
+ "\xd1\x08\xc3\x6a\xc8\x37\x29\xa0"
+ "\xcc\x9a\x05\xdd\x5c\xe1\xff\x66"
+ "\xf2\x7a\x1d\xf2\xaf\xa9\x48\x89"
+ "\xf5\x21\x0f\x02\x48\x83\x74\xbf"
+ "\x2e\xe6\x93\x7b\xa0\xf4\xb1\x2b"
+ "\xb1\x02\x0a\x5c\x79\x19\x3b\x75"
+ "\xb7\x16\xd8\x12\x5c\xcd\x7d\x4e"
+ "\xd5\xc6\x99\xcc\x4e\x6c\x94\x95",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
+ },
+};
+
+static struct cipher_testvec camellia_xts_dec_tv_template[] = {
+ /* Generated from AES-XTS test vectors */
+ /* same as enc vectors with input and result reversed */
+ {
+ .key = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x06\xcb\xa5\xf1\x04\x63\xb2\x41"
+ "\xdc\xca\xfa\x09\xba\x74\xb9\x05"
+ "\x78\xba\xa4\xf8\x67\x4d\x7e\xad"
+ "\x20\x18\xf5\x0c\x41\x16\x2a\x61",
+ .ilen = 32,
+ .result = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .rlen = 32,
+ }, {
+ .key = "\x11\x11\x11\x11\x11\x11\x11\x11"
+ "\x11\x11\x11\x11\x11\x11\x11\x11"
+ "\x22\x22\x22\x22\x22\x22\x22\x22"
+ "\x22\x22\x22\x22\x22\x22\x22\x22",
+ .klen = 32,
+ .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\xc2\xb9\xdc\x44\x1d\xdf\xf2\x86"
+ "\x8d\x35\x42\x0a\xa5\x5e\x3d\x4f"
+ "\xb5\x37\x06\xff\xbd\xd4\x91\x70"
+ "\x80\x1f\xb2\x39\x10\x89\x44\xf5",
+ .ilen = 32,
+ .result = "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44",
+ .rlen = 32,
+ }, {
+ .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8"
+ "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0"
+ "\x22\x22\x22\x22\x22\x22\x22\x22"
+ "\x22\x22\x22\x22\x22\x22\x22\x22",
+ .klen = 32,
+ .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x52\x1f\x9d\xf5\x5a\x58\x5a\x7e"
+ "\x9f\xd0\x8e\x02\x9c\x9a\x6a\xa7"
+ "\xb4\x3b\xce\xe7\x17\xaa\x89\x6a"
+ "\x35\x3c\x6b\xb5\x61\x1c\x79\x38",
+ .ilen = 32,
+ .result = "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44"
+ "\x44\x44\x44\x44\x44\x44\x44\x44",
+ .rlen = 32,
+ }, {
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95",
+ .klen = 32,
+ .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\xc7\xf9\x0a\xaa\xcb\xb5\x8f\x33"
+ "\x60\xc3\xe9\x47\x90\xb7\x50\x57"
+ "\xa3\xad\x81\x2f\xf5\x22\x96\x02"
+ "\xaa\x7f\xea\xac\x29\x78\xca\x2a"
+ "\x7c\xcd\x31\x1a\x3c\x40\x0a\x73"
+ "\x09\x66\xad\x72\x0e\x4d\x5d\x77"
+ "\xbc\xb8\x76\x80\x37\x59\xa9\x01"
+ "\x9e\xfb\xdb\x6c\x93\xef\xb6\x8d"
+ "\x1e\xc1\x94\xa8\xd4\xb5\xb0\x01"
+ "\xd5\x01\x97\x28\xcd\x7a\x1f\xe8"
+ "\x08\xda\x76\x00\x65\xcf\x7b\x31"
+ "\xc6\xfa\xf2\x3b\x00\xa7\x6a\x9e"
+ "\x6c\x43\x80\x87\xe0\xbb\x4e\xe5"
+ "\xdc\x8a\xdf\xc3\x1d\x1b\x41\x04"
+ "\xfb\x54\xdd\x29\x27\xc2\x65\x17"
+ "\x36\x88\xb0\x85\x8d\x73\x7e\x4b"
+ "\x1d\x16\x8a\x52\xbc\xa6\xbc\xa4"
+ "\x8c\xd1\x04\x16\xbf\x8c\x01\x0f"
+ "\x7e\x6b\x59\x15\x29\xd1\x9b\xd3"
+ "\x6c\xee\xac\xdc\x45\x58\xca\x5b"
+ "\x70\x0e\x6a\x12\x86\x82\x79\x9f"
+ "\x16\xd4\x9d\x67\xcd\x70\x65\x26"
+ "\x21\x72\x1e\xa1\x94\x8a\x83\x0c"
+ "\x92\x42\x58\x5e\xa2\xc5\x31\xf3"
+ "\x7b\xd1\x31\xd4\x15\x80\x31\x61"
+ "\x5c\x53\x10\xdd\xea\xc8\x83\x5c"
+ "\x7d\xa7\x05\x66\xcc\x1e\xbb\x05"
+ "\x47\xae\xb4\x0f\x84\xd8\xf6\xb5"
+ "\xa1\xc6\x52\x00\x52\xe8\xdc\xd9"
+ "\x16\x31\xb2\x47\x91\x67\xaa\x28"
+ "\x2c\x29\x85\xa3\xf7\xf2\x24\x93"
+ "\x23\x80\x1f\xa8\x1b\x82\x8d\xdc"
+ "\x9f\x0b\xcd\xb4\x3c\x20\xbc\xec"
+ "\x4f\xc7\xee\xf8\xfd\xd9\xfb\x7e"
+ "\x3f\x0d\x23\xfa\x3f\xa7\xcc\x66"
+ "\x1c\xfe\xa6\x86\xf6\xf7\x85\xc7"
+ "\x43\xc1\xd4\xfc\xe4\x79\xc9\x1d"
+ "\xf8\x89\xcd\x20\x27\x84\x5d\x5c"
+ "\x8e\x4f\x1f\xeb\x08\x21\x4f\xa3"
+ "\xe0\x7e\x0b\x9c\xe7\x42\xcf\xb7"
+ "\x3f\x43\xcc\x86\x71\x34\x6a\xd9"
+ "\x5e\xec\x8f\x36\xc9\x0a\x03\xfe"
+ "\x18\x41\xdc\x9e\x2e\x75\x20\x3e"
+ "\xcc\x77\xe0\x8f\xe8\x43\x37\x4c"
+ "\xed\x1a\x5a\xb3\xfa\x43\xc9\x71"
+ "\x9f\xc5\xce\xcf\xff\xe7\x77\x1e"
+ "\x35\x93\xde\x6b\xc0\x6a\x7e\xa9"
+ "\x34\xb8\x27\x74\x08\xda\xf2\x4a"
+ "\x23\x5b\x9f\x55\x3a\x57\x82\x52"
+ "\xea\x6d\xc3\xc7\xf2\xc8\xb5\xdc"
+ "\xc5\xb9\xbb\xaa\xf2\x29\x9f\x49"
+ "\x7a\xef\xfe\xdc\x9f\xc9\x28\xe2"
+ "\x96\x0b\x35\x84\x05\x0d\xd6\x2a"
+ "\xea\x5a\xbf\x69\xde\xee\x4f\x8f"
+ "\x84\xb9\xcf\xa7\x57\xea\xe0\xe8"
+ "\x96\xef\x0f\x0e\xec\xc7\xa6\x74"
+ "\xb1\xfe\x7a\x6d\x11\xdd\x0e\x15"
+ "\x4a\x1e\x73\x7f\x55\xea\xf6\xe1"
+ "\x5b\xb6\x71\xda\xb0\x0c\xba\x26"
+ "\x5c\x48\x38\x6d\x1c\x32\xb2\x7d"
+ "\x05\x87\xc2\x1e\x7e\x2d\xd4\x33"
+ "\xcc\x06\xdb\xe7\x82\x29\x63\xd1"
+ "\x52\x84\x4f\xee\x27\xe8\x02\xd4"
+ "\x34\x3c\x69\xc2\xbd\x20\xe6\x7a",
+ .ilen = 512,
+ .result = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .rlen = 512,
+ }, {
+ .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
+ "\x23\x53\x60\x28\x74\x71\x35\x26"
+ "\x62\x49\x77\x57\x24\x70\x93\x69"
+ "\x99\x59\x57\x49\x66\x96\x76\x27"
+ "\x31\x41\x59\x26\x53\x58\x97\x93"
+ "\x23\x84\x62\x64\x33\x83\x27\x95"
+ "\x02\x88\x41\x97\x16\x93\x99\x37"
+ "\x51\x05\x82\x09\x74\x94\x45\x92",
+ .klen = 64,
+ .iv = "\xff\x00\x00\x00\x00\x00\x00\x00"
+ "\x00\x00\x00\x00\x00\x00\x00\x00",
+ .input = "\x49\xcd\xb8\xbf\x2f\x73\x37\x28"
+ "\x9a\x7f\x6e\x57\x55\xb8\x07\x88"
+ "\x4a\x0d\x8b\x55\x60\xed\xb6\x7b"
+ "\xf1\x74\xac\x96\x05\x7b\x32\xca"
+ "\xd1\x4e\xf1\x58\x29\x16\x24\x6c"
+ "\xf2\xb3\xe4\x88\x84\xac\x4d\xee"
+ "\x97\x07\x82\xf0\x07\x12\x38\x0a"
+ "\x67\x62\xaf\xfd\x85\x9f\x0a\x55"
+ "\xa5\x20\xc5\x60\xe4\x68\x53\xa4"
+ "\x0e\x2e\x65\xe3\xe4\x0c\x30\x7c"
+ "\x1c\x01\x4f\x55\xa9\x13\xeb\x25"
+ "\x21\x87\xbc\xd3\xe7\x67\x4f\x38"
+ "\xa8\x14\x25\x71\xe9\x2e\x4c\x21"
+ "\x41\x82\x0c\x45\x39\x35\xa8\x75"
+ "\x03\x29\x01\x84\x8c\xab\x48\xbe"
+ "\x11\x56\x22\x67\xb7\x67\x1a\x09"
+ "\xa1\x72\x25\x41\x3c\x39\x65\x80"
+ "\x7d\x2f\xf8\x2c\x73\x04\x58\x9d"
+ "\xdd\x16\x8b\x63\x70\x4e\xc5\x17"
+ "\x21\xe0\x84\x51\x4b\x6f\x05\x52"
+ "\xe3\x63\x34\xfa\xa4\xaf\x33\x20"
+ "\xc1\xae\x32\xc4\xb8\x2b\xdb\x76"
+ "\xd9\x02\x31\x2f\xa3\xc6\xd0\x7b"
+ "\xaf\x1b\x84\xe3\x9b\xbf\xa6\xe0"
+ "\xb8\x8a\x13\x88\x71\xf4\x11\xa5"
+ "\xe9\xa9\x10\x33\xe0\xbe\x49\x89"
+ "\x41\x22\xf5\x9d\x80\x3e\x3b\x76"
+ "\x01\x16\x50\x6e\x7c\x6a\x81\xe9"
+ "\x13\x2c\xde\xb2\x5f\x79\xba\xb2"
+ "\xb1\x75\xae\xd2\x07\x98\x4b\x69"
+ "\xae\x7d\x5b\x90\xc2\x6c\xe6\x98"
+ "\xd3\x4c\xa1\xa3\x9c\xc9\x33\x6a"
+ "\x0d\x23\xb1\x79\x25\x13\x4b\xe5"
+ "\xaf\x93\x20\x5c\x7f\x06\x7a\x34"
+ "\x0b\x78\xe3\x67\x26\xe0\xad\x95"
+ "\xc5\x4e\x26\x22\xcf\x73\x77\x62"
+ "\x3e\x10\xd7\x90\x4b\x52\x1c\xc9"
+ "\xef\x38\x52\x18\x0e\x29\x7e\xef"
+ "\x34\xfe\x31\x95\xc5\xbc\xa8\xe2"
+ "\xa8\x4e\x9f\xea\xa6\xf0\xfe\x5d"
+ "\xc5\x39\x86\xed\x2f\x6d\xa0\xfe"
+ "\x96\xcd\x41\x10\x78\x4e\x0c\xc9"
+ "\xc3\x6d\x0f\xb7\xe8\xe0\x62\xab"
+ "\x8b\xf1\x21\x89\xa1\x12\xaa\xfa"
+ "\x9d\x70\xbe\x4c\xa8\x98\x89\x01"
+ "\xb9\xe2\x61\xde\x0c\x4a\x0b\xaa"
+ "\x89\xf5\x14\x79\x18\x8f\x3b\x0d"
+ "\x21\x17\xf8\x59\x15\x24\x64\x22"
+ "\x57\x48\x80\xd5\x3d\x92\x30\x07"
+ "\xd9\xa1\x4a\x23\x16\x43\x48\x0e"
+ "\x2b\x2d\x1b\x87\xef\x7e\xbd\xfa"
+ "\x49\xbc\x7e\x68\x6e\xa8\x46\x95"
+ "\xad\x5e\xfe\x0a\xa8\xd3\x1a\x5d"
+ "\x6b\x84\xf3\x00\xba\x52\x05\x02"
+ "\xe3\x96\x4e\xb6\x79\x3f\x43\xd3"
+ "\x4d\x3f\xd6\xab\x0a\xc4\x75\x2d"
+ "\xd1\x08\xc3\x6a\xc8\x37\x29\xa0"
+ "\xcc\x9a\x05\xdd\x5c\xe1\xff\x66"
+ "\xf2\x7a\x1d\xf2\xaf\xa9\x48\x89"
+ "\xf5\x21\x0f\x02\x48\x83\x74\xbf"
+ "\x2e\xe6\x93\x7b\xa0\xf4\xb1\x2b"
+ "\xb1\x02\x0a\x5c\x79\x19\x3b\x75"
+ "\xb7\x16\xd8\x12\x5c\xcd\x7d\x4e"
+ "\xd5\xc6\x99\xcc\x4e\x6c\x94\x95",
+ .ilen = 512,
+ .result = "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
+ "\x00\x01\x02\x03\x04\x05\x06\x07"
+ "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+ "\x10\x11\x12\x13\x14\x15\x16\x17"
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x21\x22\x23\x24\x25\x26\x27"
+ "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
+ "\x30\x31\x32\x33\x34\x35\x36\x37"
+ "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
+ "\x40\x41\x42\x43\x44\x45\x46\x47"
+ "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
+ "\x50\x51\x52\x53\x54\x55\x56\x57"
+ "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
+ "\x60\x61\x62\x63\x64\x65\x66\x67"
+ "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
+ "\x70\x71\x72\x73\x74\x75\x76\x77"
+ "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
+ "\x80\x81\x82\x83\x84\x85\x86\x87"
+ "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
+ "\x90\x91\x92\x93\x94\x95\x96\x97"
+ "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
+ "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
+ "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
+ "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
+ "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
+ "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
+ "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
+ "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
+ "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
+ "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
+ "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
+ "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
+ "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
+ .rlen = 512,
+ .also_non_np = 1,
+ .np = 2,
+ .tap = { 512 - 16, 16 },
},
};
@@ -8347,10 +27706,19 @@ struct comp_testvec {
char output[COMP_BUF_SIZE];
};
+struct pcomp_testvec {
+ void *params;
+ unsigned int paramsize;
+ int inlen, outlen;
+ char input[COMP_BUF_SIZE];
+ char output[COMP_BUF_SIZE];
+};
+
/*
* Deflate test vectors (null-terminated strings).
* Params: winbits=-11, Z_DEFAULT_COMPRESSION, MAX_MEM_LEVEL.
*/
+
#define DEFLATE_COMP_TEST_VECTORS 2
#define DEFLATE_DECOMP_TEST_VECTORS 2
@@ -8426,6 +27794,139 @@ static struct comp_testvec deflate_decomp_tv_template[] = {
},
};
+#define ZLIB_COMP_TEST_VECTORS 2
+#define ZLIB_DECOMP_TEST_VECTORS 2
+
+static const struct {
+ struct nlattr nla;
+ int val;
+} deflate_comp_params[] = {
+ {
+ .nla = {
+ .nla_len = NLA_HDRLEN + sizeof(int),
+ .nla_type = ZLIB_COMP_LEVEL,
+ },
+ .val = Z_DEFAULT_COMPRESSION,
+ }, {
+ .nla = {
+ .nla_len = NLA_HDRLEN + sizeof(int),
+ .nla_type = ZLIB_COMP_METHOD,
+ },
+ .val = Z_DEFLATED,
+ }, {
+ .nla = {
+ .nla_len = NLA_HDRLEN + sizeof(int),
+ .nla_type = ZLIB_COMP_WINDOWBITS,
+ },
+ .val = -11,
+ }, {
+ .nla = {
+ .nla_len = NLA_HDRLEN + sizeof(int),
+ .nla_type = ZLIB_COMP_MEMLEVEL,
+ },
+ .val = MAX_MEM_LEVEL,
+ }, {
+ .nla = {
+ .nla_len = NLA_HDRLEN + sizeof(int),
+ .nla_type = ZLIB_COMP_STRATEGY,
+ },
+ .val = Z_DEFAULT_STRATEGY,
+ }
+};
+
+static const struct {
+ struct nlattr nla;
+ int val;
+} deflate_decomp_params[] = {
+ {
+ .nla = {
+ .nla_len = NLA_HDRLEN + sizeof(int),
+ .nla_type = ZLIB_DECOMP_WINDOWBITS,
+ },
+ .val = -11,
+ }
+};
+
+static struct pcomp_testvec zlib_comp_tv_template[] = {
+ {
+ .params = &deflate_comp_params,
+ .paramsize = sizeof(deflate_comp_params),
+ .inlen = 70,
+ .outlen = 38,
+ .input = "Join us now and share the software "
+ "Join us now and share the software ",
+ .output = "\xf3\xca\xcf\xcc\x53\x28\x2d\x56"
+ "\xc8\xcb\x2f\x57\x48\xcc\x4b\x51"
+ "\x28\xce\x48\x2c\x4a\x55\x28\xc9"
+ "\x48\x55\x28\xce\x4f\x2b\x29\x07"
+ "\x71\xbc\x08\x2b\x01\x00",
+ }, {
+ .params = &deflate_comp_params,
+ .paramsize = sizeof(deflate_comp_params),
+ .inlen = 191,
+ .outlen = 122,
+ .input = "This document describes a compression method based on the DEFLATE"
+ "compression algorithm. This document defines the application of "
+ "the DEFLATE algorithm to the IP Payload Compression Protocol.",
+ .output = "\x5d\x8d\x31\x0e\xc2\x30\x10\x04"
+ "\xbf\xb2\x2f\xc8\x1f\x10\x04\x09"
+ "\x89\xc2\x85\x3f\x70\xb1\x2f\xf8"
+ "\x24\xdb\x67\xd9\x47\xc1\xef\x49"
+ "\x68\x12\x51\xae\x76\x67\xd6\x27"
+ "\x19\x88\x1a\xde\x85\xab\x21\xf2"
+ "\x08\x5d\x16\x1e\x20\x04\x2d\xad"
+ "\xf3\x18\xa2\x15\x85\x2d\x69\xc4"
+ "\x42\x83\x23\xb6\x6c\x89\x71\x9b"
+ "\xef\xcf\x8b\x9f\xcf\x33\xca\x2f"
+ "\xed\x62\xa9\x4c\x80\xff\x13\xaf"
+ "\x52\x37\xed\x0e\x52\x6b\x59\x02"
+ "\xd9\x4e\xe8\x7a\x76\x1d\x02\x98"
+ "\xfe\x8a\x87\x83\xa3\x4f\x56\x8a"
+ "\xb8\x9e\x8e\x5c\x57\xd3\xa0\x79"
+ "\xfa\x02",
+ },
+};
+
+static struct pcomp_testvec zlib_decomp_tv_template[] = {
+ {
+ .params = &deflate_decomp_params,
+ .paramsize = sizeof(deflate_decomp_params),
+ .inlen = 122,
+ .outlen = 191,
+ .input = "\x5d\x8d\x31\x0e\xc2\x30\x10\x04"
+ "\xbf\xb2\x2f\xc8\x1f\x10\x04\x09"
+ "\x89\xc2\x85\x3f\x70\xb1\x2f\xf8"
+ "\x24\xdb\x67\xd9\x47\xc1\xef\x49"
+ "\x68\x12\x51\xae\x76\x67\xd6\x27"
+ "\x19\x88\x1a\xde\x85\xab\x21\xf2"
+ "\x08\x5d\x16\x1e\x20\x04\x2d\xad"
+ "\xf3\x18\xa2\x15\x85\x2d\x69\xc4"
+ "\x42\x83\x23\xb6\x6c\x89\x71\x9b"
+ "\xef\xcf\x8b\x9f\xcf\x33\xca\x2f"
+ "\xed\x62\xa9\x4c\x80\xff\x13\xaf"
+ "\x52\x37\xed\x0e\x52\x6b\x59\x02"
+ "\xd9\x4e\xe8\x7a\x76\x1d\x02\x98"
+ "\xfe\x8a\x87\x83\xa3\x4f\x56\x8a"
+ "\xb8\x9e\x8e\x5c\x57\xd3\xa0\x79"
+ "\xfa\x02",
+ .output = "This document describes a compression method based on the DEFLATE"
+ "compression algorithm. This document defines the application of "
+ "the DEFLATE algorithm to the IP Payload Compression Protocol.",
+ }, {
+ .params = &deflate_decomp_params,
+ .paramsize = sizeof(deflate_decomp_params),
+ .inlen = 38,
+ .outlen = 70,
+ .input = "\xf3\xca\xcf\xcc\x53\x28\x2d\x56"
+ "\xc8\xcb\x2f\x57\x48\xcc\x4b\x51"
+ "\x28\xce\x48\x2c\x4a\x55\x28\xc9"
+ "\x48\x55\x28\xce\x4f\x2b\x29\x07"
+ "\x71\xbc\x08\x2b\x01\x00",
+ .output = "Join us now and share the software "
+ "Join us now and share the software ",
+ },
+};
+
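The pcomp vectors above hand their tuning parameters to the algorithm as a packed run of netlink attributes: each entry is a struct nlattr header followed by an int payload, and .paramsize covers the whole run. A minimal sketch of walking such a params/paramsize blob with the stock netlink iteration helpers follows; the function name and the debug print are illustrative assumptions, not part of the patch.

	#include <net/netlink.h>

	/* Illustrative walker for a parameter blob such as deflate_comp_params. */
	static void walk_pcomp_params(void *params, unsigned int paramsize)
	{
		struct nlattr *nla;
		int rem;

		nla_for_each_attr(nla, params, paramsize, rem) {
			int val = *(int *)nla_data(nla);	/* each attribute carries one int */

			pr_debug("pcomp attr %d = %d\n", nla_type(nla), val);
		}
	}
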
/*
* LZO test vectors (null-terminated strings).
*/
@@ -8435,38 +27936,40 @@ static struct comp_testvec deflate_decomp_tv_template[] = {
static struct comp_testvec lzo_comp_tv_template[] = {
{
.inlen = 70,
- .outlen = 46,
+ .outlen = 57,
.input = "Join us now and share the software "
"Join us now and share the software ",
.output = "\x00\x0d\x4a\x6f\x69\x6e\x20\x75"
- "\x73\x20\x6e\x6f\x77\x20\x61\x6e"
- "\x64\x20\x73\x68\x61\x72\x65\x20"
- "\x74\x68\x65\x20\x73\x6f\x66\x74"
- "\x77\x70\x01\x01\x4a\x6f\x69\x6e"
- "\x3d\x88\x00\x11\x00\x00",
+ "\x73\x20\x6e\x6f\x77\x20\x61\x6e"
+ "\x64\x20\x73\x68\x61\x72\x65\x20"
+ "\x74\x68\x65\x20\x73\x6f\x66\x74"
+ "\x77\x70\x01\x32\x88\x00\x0c\x65"
+ "\x20\x74\x68\x65\x20\x73\x6f\x66"
+ "\x74\x77\x61\x72\x65\x20\x11\x00"
+ "\x00",
}, {
.inlen = 159,
- .outlen = 133,
+ .outlen = 131,
.input = "This document describes a compression method based on the LZO "
"compression algorithm. This document defines the application of "
"the LZO algorithm used in UBIFS.",
- .output = "\x00\x2b\x54\x68\x69\x73\x20\x64"
+ .output = "\x00\x2c\x54\x68\x69\x73\x20\x64"
"\x6f\x63\x75\x6d\x65\x6e\x74\x20"
"\x64\x65\x73\x63\x72\x69\x62\x65"
"\x73\x20\x61\x20\x63\x6f\x6d\x70"
"\x72\x65\x73\x73\x69\x6f\x6e\x20"
"\x6d\x65\x74\x68\x6f\x64\x20\x62"
"\x61\x73\x65\x64\x20\x6f\x6e\x20"
- "\x74\x68\x65\x20\x4c\x5a\x4f\x2b"
- "\x8c\x00\x0d\x61\x6c\x67\x6f\x72"
- "\x69\x74\x68\x6d\x2e\x20\x20\x54"
- "\x68\x69\x73\x2a\x54\x01\x02\x66"
- "\x69\x6e\x65\x73\x94\x06\x05\x61"
- "\x70\x70\x6c\x69\x63\x61\x74\x76"
- "\x0a\x6f\x66\x88\x02\x60\x09\x27"
- "\xf0\x00\x0c\x20\x75\x73\x65\x64"
- "\x20\x69\x6e\x20\x55\x42\x49\x46"
- "\x53\x2e\x11\x00\x00",
+ "\x74\x68\x65\x20\x4c\x5a\x4f\x20"
+ "\x2a\x8c\x00\x09\x61\x6c\x67\x6f"
+ "\x72\x69\x74\x68\x6d\x2e\x20\x20"
+ "\x2e\x54\x01\x03\x66\x69\x6e\x65"
+ "\x73\x20\x74\x06\x05\x61\x70\x70"
+ "\x6c\x69\x63\x61\x74\x76\x0a\x6f"
+ "\x66\x88\x02\x60\x09\x27\xf0\x00"
+ "\x0c\x20\x75\x73\x65\x64\x20\x69"
+ "\x6e\x20\x55\x42\x49\x46\x53\x2e"
+ "\x11\x00\x00",
},
};
@@ -8561,7 +28064,7 @@ static struct hash_testvec michael_mic_tv_template[] = {
/*
* CRC32C test vectors
*/
-#define CRC32C_TEST_VECTORS 14
+#define CRC32C_TEST_VECTORS 15
static struct hash_testvec crc32c_tv_template[] = {
{
@@ -8732,7 +28235,358 @@ static struct hash_testvec crc32c_tv_template[] = {
.digest = "\x75\xd3\xc5\x24",
.np = 2,
.tap = { 31, 209 }
+ }, {
+ .key = "\xff\xff\xff\xff",
+ .ksize = 4,
+ .plaintext = "\x6e\x05\x79\x10\xa7\x1b\xb2\x49"
+ "\xe0\x54\xeb\x82\x19\x8d\x24\xbb"
+ "\x2f\xc6\x5d\xf4\x68\xff\x96\x0a"
+ "\xa1\x38\xcf\x43\xda\x71\x08\x7c"
+ "\x13\xaa\x1e\xb5\x4c\xe3\x57\xee"
+ "\x85\x1c\x90\x27\xbe\x32\xc9\x60"
+ "\xf7\x6b\x02\x99\x0d\xa4\x3b\xd2"
+ "\x46\xdd\x74\x0b\x7f\x16\xad\x21"
+ "\xb8\x4f\xe6\x5a\xf1\x88\x1f\x93"
+ "\x2a\xc1\x35\xcc\x63\xfa\x6e\x05"
+ "\x9c\x10\xa7\x3e\xd5\x49\xe0\x77"
+ "\x0e\x82\x19\xb0\x24\xbb\x52\xe9"
+ "\x5d\xf4\x8b\x22\x96\x2d\xc4\x38"
+ "\xcf\x66\xfd\x71\x08\x9f\x13\xaa"
+ "\x41\xd8\x4c\xe3\x7a\x11\x85\x1c"
+ "\xb3\x27\xbe\x55\xec\x60\xf7\x8e"
+ "\x02\x99\x30\xc7\x3b\xd2\x69\x00"
+ "\x74\x0b\xa2\x16\xad\x44\xdb\x4f"
+ "\xe6\x7d\x14\x88\x1f\xb6\x2a\xc1"
+ "\x58\xef\x63\xfa\x91\x05\x9c\x33"
+ "\xca\x3e\xd5\x6c\x03\x77\x0e\xa5"
+ "\x19\xb0\x47\xde\x52\xe9\x80\x17"
+ "\x8b\x22\xb9\x2d\xc4\x5b\xf2\x66"
+ "\xfd\x94\x08\x9f\x36\xcd\x41\xd8"
+ "\x6f\x06\x7a\x11\xa8\x1c\xb3\x4a"
+ "\xe1\x55\xec\x83\x1a\x8e\x25\xbc"
+ "\x30\xc7\x5e\xf5\x69\x00\x97\x0b"
+ "\xa2\x39\xd0\x44\xdb\x72\x09\x7d"
+ "\x14\xab\x1f\xb6\x4d\xe4\x58\xef"
+ "\x86\x1d\x91\x28\xbf\x33\xca\x61"
+ "\xf8\x6c\x03\x9a\x0e\xa5\x3c\xd3"
+ "\x47\xde\x75\x0c\x80\x17\xae\x22"
+ "\xb9\x50\xe7\x5b\xf2\x89\x20\x94"
+ "\x2b\xc2\x36\xcd\x64\xfb\x6f\x06"
+ "\x9d\x11\xa8\x3f\xd6\x4a\xe1\x78"
+ "\x0f\x83\x1a\xb1\x25\xbc\x53\xea"
+ "\x5e\xf5\x8c\x00\x97\x2e\xc5\x39"
+ "\xd0\x67\xfe\x72\x09\xa0\x14\xab"
+ "\x42\xd9\x4d\xe4\x7b\x12\x86\x1d"
+ "\xb4\x28\xbf\x56\xed\x61\xf8\x8f"
+ "\x03\x9a\x31\xc8\x3c\xd3\x6a\x01"
+ "\x75\x0c\xa3\x17\xae\x45\xdc\x50"
+ "\xe7\x7e\x15\x89\x20\xb7\x2b\xc2"
+ "\x59\xf0\x64\xfb\x92\x06\x9d\x34"
+ "\xcb\x3f\xd6\x6d\x04\x78\x0f\xa6"
+ "\x1a\xb1\x48\xdf\x53\xea\x81\x18"
+ "\x8c\x23\xba\x2e\xc5\x5c\xf3\x67"
+ "\xfe\x95\x09\xa0\x37\xce\x42\xd9"
+ "\x70\x07\x7b\x12\xa9\x1d\xb4\x4b"
+ "\xe2\x56\xed\x84\x1b\x8f\x26\xbd"
+ "\x31\xc8\x5f\xf6\x6a\x01\x98\x0c"
+ "\xa3\x3a\xd1\x45\xdc\x73\x0a\x7e"
+ "\x15\xac\x20\xb7\x4e\xe5\x59\xf0"
+ "\x87\x1e\x92\x29\xc0\x34\xcb\x62"
+ "\xf9\x6d\x04\x9b\x0f\xa6\x3d\xd4"
+ "\x48\xdf\x76\x0d\x81\x18\xaf\x23"
+ "\xba\x51\xe8\x5c\xf3\x8a\x21\x95"
+ "\x2c\xc3\x37\xce\x65\xfc\x70\x07"
+ "\x9e\x12\xa9\x40\xd7\x4b\xe2\x79"
+ "\x10\x84\x1b\xb2\x26\xbd\x54\xeb"
+ "\x5f\xf6\x8d\x01\x98\x2f\xc6\x3a"
+ "\xd1\x68\xff\x73\x0a\xa1\x15\xac"
+ "\x43\xda\x4e\xe5\x7c\x13\x87\x1e"
+ "\xb5\x29\xc0\x57\xee\x62\xf9\x90"
+ "\x04\x9b\x32\xc9\x3d\xd4\x6b\x02"
+ "\x76\x0d\xa4\x18\xaf\x46\xdd\x51"
+ "\xe8\x7f\x16\x8a\x21\xb8\x2c\xc3"
+ "\x5a\xf1\x65\xfc\x93\x07\x9e\x35"
+ "\xcc\x40\xd7\x6e\x05\x79\x10\xa7"
+ "\x1b\xb2\x49\xe0\x54\xeb\x82\x19"
+ "\x8d\x24\xbb\x2f\xc6\x5d\xf4\x68"
+ "\xff\x96\x0a\xa1\x38\xcf\x43\xda"
+ "\x71\x08\x7c\x13\xaa\x1e\xb5\x4c"
+ "\xe3\x57\xee\x85\x1c\x90\x27\xbe"
+ "\x32\xc9\x60\xf7\x6b\x02\x99\x0d"
+ "\xa4\x3b\xd2\x46\xdd\x74\x0b\x7f"
+ "\x16\xad\x21\xb8\x4f\xe6\x5a\xf1"
+ "\x88\x1f\x93\x2a\xc1\x35\xcc\x63"
+ "\xfa\x6e\x05\x9c\x10\xa7\x3e\xd5"
+ "\x49\xe0\x77\x0e\x82\x19\xb0\x24"
+ "\xbb\x52\xe9\x5d\xf4\x8b\x22\x96"
+ "\x2d\xc4\x38\xcf\x66\xfd\x71\x08"
+ "\x9f\x13\xaa\x41\xd8\x4c\xe3\x7a"
+ "\x11\x85\x1c\xb3\x27\xbe\x55\xec"
+ "\x60\xf7\x8e\x02\x99\x30\xc7\x3b"
+ "\xd2\x69\x00\x74\x0b\xa2\x16\xad"
+ "\x44\xdb\x4f\xe6\x7d\x14\x88\x1f"
+ "\xb6\x2a\xc1\x58\xef\x63\xfa\x91"
+ "\x05\x9c\x33\xca\x3e\xd5\x6c\x03"
+ "\x77\x0e\xa5\x19\xb0\x47\xde\x52"
+ "\xe9\x80\x17\x8b\x22\xb9\x2d\xc4"
+ "\x5b\xf2\x66\xfd\x94\x08\x9f\x36"
+ "\xcd\x41\xd8\x6f\x06\x7a\x11\xa8"
+ "\x1c\xb3\x4a\xe1\x55\xec\x83\x1a"
+ "\x8e\x25\xbc\x30\xc7\x5e\xf5\x69"
+ "\x00\x97\x0b\xa2\x39\xd0\x44\xdb"
+ "\x72\x09\x7d\x14\xab\x1f\xb6\x4d"
+ "\xe4\x58\xef\x86\x1d\x91\x28\xbf"
+ "\x33\xca\x61\xf8\x6c\x03\x9a\x0e"
+ "\xa5\x3c\xd3\x47\xde\x75\x0c\x80"
+ "\x17\xae\x22\xb9\x50\xe7\x5b\xf2"
+ "\x89\x20\x94\x2b\xc2\x36\xcd\x64"
+ "\xfb\x6f\x06\x9d\x11\xa8\x3f\xd6"
+ "\x4a\xe1\x78\x0f\x83\x1a\xb1\x25"
+ "\xbc\x53\xea\x5e\xf5\x8c\x00\x97"
+ "\x2e\xc5\x39\xd0\x67\xfe\x72\x09"
+ "\xa0\x14\xab\x42\xd9\x4d\xe4\x7b"
+ "\x12\x86\x1d\xb4\x28\xbf\x56\xed"
+ "\x61\xf8\x8f\x03\x9a\x31\xc8\x3c"
+ "\xd3\x6a\x01\x75\x0c\xa3\x17\xae"
+ "\x45\xdc\x50\xe7\x7e\x15\x89\x20"
+ "\xb7\x2b\xc2\x59\xf0\x64\xfb\x92"
+ "\x06\x9d\x34\xcb\x3f\xd6\x6d\x04"
+ "\x78\x0f\xa6\x1a\xb1\x48\xdf\x53"
+ "\xea\x81\x18\x8c\x23\xba\x2e\xc5"
+ "\x5c\xf3\x67\xfe\x95\x09\xa0\x37"
+ "\xce\x42\xd9\x70\x07\x7b\x12\xa9"
+ "\x1d\xb4\x4b\xe2\x56\xed\x84\x1b"
+ "\x8f\x26\xbd\x31\xc8\x5f\xf6\x6a"
+ "\x01\x98\x0c\xa3\x3a\xd1\x45\xdc"
+ "\x73\x0a\x7e\x15\xac\x20\xb7\x4e"
+ "\xe5\x59\xf0\x87\x1e\x92\x29\xc0"
+ "\x34\xcb\x62\xf9\x6d\x04\x9b\x0f"
+ "\xa6\x3d\xd4\x48\xdf\x76\x0d\x81"
+ "\x18\xaf\x23\xba\x51\xe8\x5c\xf3"
+ "\x8a\x21\x95\x2c\xc3\x37\xce\x65"
+ "\xfc\x70\x07\x9e\x12\xa9\x40\xd7"
+ "\x4b\xe2\x79\x10\x84\x1b\xb2\x26"
+ "\xbd\x54\xeb\x5f\xf6\x8d\x01\x98"
+ "\x2f\xc6\x3a\xd1\x68\xff\x73\x0a"
+ "\xa1\x15\xac\x43\xda\x4e\xe5\x7c"
+ "\x13\x87\x1e\xb5\x29\xc0\x57\xee"
+ "\x62\xf9\x90\x04\x9b\x32\xc9\x3d"
+ "\xd4\x6b\x02\x76\x0d\xa4\x18\xaf"
+ "\x46\xdd\x51\xe8\x7f\x16\x8a\x21"
+ "\xb8\x2c\xc3\x5a\xf1\x65\xfc\x93"
+ "\x07\x9e\x35\xcc\x40\xd7\x6e\x05"
+ "\x79\x10\xa7\x1b\xb2\x49\xe0\x54"
+ "\xeb\x82\x19\x8d\x24\xbb\x2f\xc6"
+ "\x5d\xf4\x68\xff\x96\x0a\xa1\x38"
+ "\xcf\x43\xda\x71\x08\x7c\x13\xaa"
+ "\x1e\xb5\x4c\xe3\x57\xee\x85\x1c"
+ "\x90\x27\xbe\x32\xc9\x60\xf7\x6b"
+ "\x02\x99\x0d\xa4\x3b\xd2\x46\xdd"
+ "\x74\x0b\x7f\x16\xad\x21\xb8\x4f"
+ "\xe6\x5a\xf1\x88\x1f\x93\x2a\xc1"
+ "\x35\xcc\x63\xfa\x6e\x05\x9c\x10"
+ "\xa7\x3e\xd5\x49\xe0\x77\x0e\x82"
+ "\x19\xb0\x24\xbb\x52\xe9\x5d\xf4"
+ "\x8b\x22\x96\x2d\xc4\x38\xcf\x66"
+ "\xfd\x71\x08\x9f\x13\xaa\x41\xd8"
+ "\x4c\xe3\x7a\x11\x85\x1c\xb3\x27"
+ "\xbe\x55\xec\x60\xf7\x8e\x02\x99"
+ "\x30\xc7\x3b\xd2\x69\x00\x74\x0b"
+ "\xa2\x16\xad\x44\xdb\x4f\xe6\x7d"
+ "\x14\x88\x1f\xb6\x2a\xc1\x58\xef"
+ "\x63\xfa\x91\x05\x9c\x33\xca\x3e"
+ "\xd5\x6c\x03\x77\x0e\xa5\x19\xb0"
+ "\x47\xde\x52\xe9\x80\x17\x8b\x22"
+ "\xb9\x2d\xc4\x5b\xf2\x66\xfd\x94"
+ "\x08\x9f\x36\xcd\x41\xd8\x6f\x06"
+ "\x7a\x11\xa8\x1c\xb3\x4a\xe1\x55"
+ "\xec\x83\x1a\x8e\x25\xbc\x30\xc7"
+ "\x5e\xf5\x69\x00\x97\x0b\xa2\x39"
+ "\xd0\x44\xdb\x72\x09\x7d\x14\xab"
+ "\x1f\xb6\x4d\xe4\x58\xef\x86\x1d"
+ "\x91\x28\xbf\x33\xca\x61\xf8\x6c"
+ "\x03\x9a\x0e\xa5\x3c\xd3\x47\xde"
+ "\x75\x0c\x80\x17\xae\x22\xb9\x50"
+ "\xe7\x5b\xf2\x89\x20\x94\x2b\xc2"
+ "\x36\xcd\x64\xfb\x6f\x06\x9d\x11"
+ "\xa8\x3f\xd6\x4a\xe1\x78\x0f\x83"
+ "\x1a\xb1\x25\xbc\x53\xea\x5e\xf5"
+ "\x8c\x00\x97\x2e\xc5\x39\xd0\x67"
+ "\xfe\x72\x09\xa0\x14\xab\x42\xd9"
+ "\x4d\xe4\x7b\x12\x86\x1d\xb4\x28"
+ "\xbf\x56\xed\x61\xf8\x8f\x03\x9a"
+ "\x31\xc8\x3c\xd3\x6a\x01\x75\x0c"
+ "\xa3\x17\xae\x45\xdc\x50\xe7\x7e"
+ "\x15\x89\x20\xb7\x2b\xc2\x59\xf0"
+ "\x64\xfb\x92\x06\x9d\x34\xcb\x3f"
+ "\xd6\x6d\x04\x78\x0f\xa6\x1a\xb1"
+ "\x48\xdf\x53\xea\x81\x18\x8c\x23"
+ "\xba\x2e\xc5\x5c\xf3\x67\xfe\x95"
+ "\x09\xa0\x37\xce\x42\xd9\x70\x07"
+ "\x7b\x12\xa9\x1d\xb4\x4b\xe2\x56"
+ "\xed\x84\x1b\x8f\x26\xbd\x31\xc8"
+ "\x5f\xf6\x6a\x01\x98\x0c\xa3\x3a"
+ "\xd1\x45\xdc\x73\x0a\x7e\x15\xac"
+ "\x20\xb7\x4e\xe5\x59\xf0\x87\x1e"
+ "\x92\x29\xc0\x34\xcb\x62\xf9\x6d"
+ "\x04\x9b\x0f\xa6\x3d\xd4\x48\xdf"
+ "\x76\x0d\x81\x18\xaf\x23\xba\x51"
+ "\xe8\x5c\xf3\x8a\x21\x95\x2c\xc3"
+ "\x37\xce\x65\xfc\x70\x07\x9e\x12"
+ "\xa9\x40\xd7\x4b\xe2\x79\x10\x84"
+ "\x1b\xb2\x26\xbd\x54\xeb\x5f\xf6"
+ "\x8d\x01\x98\x2f\xc6\x3a\xd1\x68"
+ "\xff\x73\x0a\xa1\x15\xac\x43\xda"
+ "\x4e\xe5\x7c\x13\x87\x1e\xb5\x29"
+ "\xc0\x57\xee\x62\xf9\x90\x04\x9b"
+ "\x32\xc9\x3d\xd4\x6b\x02\x76\x0d"
+ "\xa4\x18\xaf\x46\xdd\x51\xe8\x7f"
+ "\x16\x8a\x21\xb8\x2c\xc3\x5a\xf1"
+ "\x65\xfc\x93\x07\x9e\x35\xcc\x40"
+ "\xd7\x6e\x05\x79\x10\xa7\x1b\xb2"
+ "\x49\xe0\x54\xeb\x82\x19\x8d\x24"
+ "\xbb\x2f\xc6\x5d\xf4\x68\xff\x96"
+ "\x0a\xa1\x38\xcf\x43\xda\x71\x08"
+ "\x7c\x13\xaa\x1e\xb5\x4c\xe3\x57"
+ "\xee\x85\x1c\x90\x27\xbe\x32\xc9"
+ "\x60\xf7\x6b\x02\x99\x0d\xa4\x3b"
+ "\xd2\x46\xdd\x74\x0b\x7f\x16\xad"
+ "\x21\xb8\x4f\xe6\x5a\xf1\x88\x1f"
+ "\x93\x2a\xc1\x35\xcc\x63\xfa\x6e"
+ "\x05\x9c\x10\xa7\x3e\xd5\x49\xe0"
+ "\x77\x0e\x82\x19\xb0\x24\xbb\x52"
+ "\xe9\x5d\xf4\x8b\x22\x96\x2d\xc4"
+ "\x38\xcf\x66\xfd\x71\x08\x9f\x13"
+ "\xaa\x41\xd8\x4c\xe3\x7a\x11\x85"
+ "\x1c\xb3\x27\xbe\x55\xec\x60\xf7"
+ "\x8e\x02\x99\x30\xc7\x3b\xd2\x69"
+ "\x00\x74\x0b\xa2\x16\xad\x44\xdb"
+ "\x4f\xe6\x7d\x14\x88\x1f\xb6\x2a"
+ "\xc1\x58\xef\x63\xfa\x91\x05\x9c"
+ "\x33\xca\x3e\xd5\x6c\x03\x77\x0e"
+ "\xa5\x19\xb0\x47\xde\x52\xe9\x80"
+ "\x17\x8b\x22\xb9\x2d\xc4\x5b\xf2"
+ "\x66\xfd\x94\x08\x9f\x36\xcd\x41"
+ "\xd8\x6f\x06\x7a\x11\xa8\x1c\xb3"
+ "\x4a\xe1\x55\xec\x83\x1a\x8e\x25"
+ "\xbc\x30\xc7\x5e\xf5\x69\x00\x97"
+ "\x0b\xa2\x39\xd0\x44\xdb\x72\x09"
+ "\x7d\x14\xab\x1f\xb6\x4d\xe4\x58"
+ "\xef\x86\x1d\x91\x28\xbf\x33\xca"
+ "\x61\xf8\x6c\x03\x9a\x0e\xa5\x3c"
+ "\xd3\x47\xde\x75\x0c\x80\x17\xae"
+ "\x22\xb9\x50\xe7\x5b\xf2\x89\x20"
+ "\x94\x2b\xc2\x36\xcd\x64\xfb\x6f"
+ "\x06\x9d\x11\xa8\x3f\xd6\x4a\xe1"
+ "\x78\x0f\x83\x1a\xb1\x25\xbc\x53"
+ "\xea\x5e\xf5\x8c\x00\x97\x2e\xc5"
+ "\x39\xd0\x67\xfe\x72\x09\xa0\x14"
+ "\xab\x42\xd9\x4d\xe4\x7b\x12\x86"
+ "\x1d\xb4\x28\xbf\x56\xed\x61\xf8"
+ "\x8f\x03\x9a\x31\xc8\x3c\xd3\x6a"
+ "\x01\x75\x0c\xa3\x17\xae\x45\xdc"
+ "\x50\xe7\x7e\x15\x89\x20\xb7\x2b"
+ "\xc2\x59\xf0\x64\xfb\x92\x06\x9d"
+ "\x34\xcb\x3f\xd6\x6d\x04\x78\x0f"
+ "\xa6\x1a\xb1\x48\xdf\x53\xea\x81"
+ "\x18\x8c\x23\xba\x2e\xc5\x5c\xf3"
+ "\x67\xfe\x95\x09\xa0\x37\xce\x42"
+ "\xd9\x70\x07\x7b\x12\xa9\x1d\xb4"
+ "\x4b\xe2\x56\xed\x84\x1b\x8f\x26"
+ "\xbd\x31\xc8\x5f\xf6\x6a\x01\x98",
+ .psize = 2048,
+ .digest = "\xec\x26\x4d\x95",
+ }
+};
+
+/*
+ * Blackfin CRC test vectors
+ */
+#define BFIN_CRC_TEST_VECTORS 6
+
+static struct hash_testvec bfin_crc_tv_template[] = {
+ {
+ .psize = 0,
+ .digest = "\x00\x00\x00\x00",
+ },
+ {
+ .key = "\x87\xa9\xcb\xed",
+ .ksize = 4,
+ .psize = 0,
+ .digest = "\x87\xa9\xcb\xed",
},
+ {
+ .key = "\xff\xff\xff\xff",
+ .ksize = 4,
+ .plaintext = "\x01\x02\x03\x04\x05\x06\x07\x08"
+ "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10"
+ "\x11\x12\x13\x14\x15\x16\x17\x18"
+ "\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20"
+ "\x21\x22\x23\x24\x25\x26\x27\x28",
+ .psize = 40,
+ .digest = "\x84\x0c\x8d\xa2",
+ },
+ {
+ .key = "\xff\xff\xff\xff",
+ .ksize = 4,
+ .plaintext = "\x01\x02\x03\x04\x05\x06\x07\x08"
+ "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10"
+ "\x11\x12\x13\x14\x15\x16\x17\x18"
+ "\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20"
+ "\x21\x22\x23\x24\x25\x26",
+ .psize = 38,
+ .digest = "\x8c\x58\xec\xb7",
+ },
+ {
+ .key = "\xff\xff\xff\xff",
+ .ksize = 4,
+ .plaintext = "\x01\x02\x03\x04\x05\x06\x07\x08"
+ "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10"
+ "\x11\x12\x13\x14\x15\x16\x17\x18"
+ "\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20"
+ "\x21\x22\x23\x24\x25\x26\x27",
+ .psize = 39,
+ .digest = "\xdc\x50\x28\x7b",
+ },
+ {
+ .key = "\xff\xff\xff\xff",
+ .ksize = 4,
+ .plaintext = "\x01\x02\x03\x04\x05\x06\x07\x08"
+ "\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10"
+ "\x11\x12\x13\x14\x15\x16\x17\x18"
+ "\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20"
+ "\x21\x22\x23\x24\x25\x26\x27\x28"
+ "\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30"
+ "\x31\x32\x33\x34\x35\x36\x37\x38"
+ "\x39\x3a\x3b\x3c\x3d\x3e\x3f\x40"
+ "\x41\x42\x43\x44\x45\x46\x47\x48"
+ "\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50"
+ "\x51\x52\x53\x54\x55\x56\x57\x58"
+ "\x59\x5a\x5b\x5c\x5d\x5e\x5f\x60"
+ "\x61\x62\x63\x64\x65\x66\x67\x68"
+ "\x69\x6a\x6b\x6c\x6d\x6e\x6f\x70"
+ "\x71\x72\x73\x74\x75\x76\x77\x78"
+ "\x79\x7a\x7b\x7c\x7d\x7e\x7f\x80"
+ "\x81\x82\x83\x84\x85\x86\x87\x88"
+ "\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90"
+ "\x91\x92\x93\x94\x95\x96\x97\x98"
+ "\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0"
+ "\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8"
+ "\xa9\xaa\xab\xac\xad\xae\xaf\xb0"
+ "\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8"
+ "\xb9\xba\xbb\xbc\xbd\xbe\xbf\xc0"
+ "\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8"
+ "\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0"
+ "\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8"
+ "\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0"
+ "\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8"
+ "\xe9\xea\xeb\xec\xed\xee\xef\xf0",
+ .psize = 240,
+ .digest = "\x10\x19\x4a\x5c",
+ .np = 2,
+ .tap = { 31, 209 }
+ },
+
};
#endif /* _CRYPTO_TESTMGR_H */
diff --git a/crypto/tgr192.c b/crypto/tgr192.c
index cbca4f208c9..87403556fd0 100644
--- a/crypto/tgr192.c
+++ b/crypto/tgr192.c
@@ -628,7 +628,7 @@ static int tgr128_final(struct shash_desc *desc, u8 * out)
return 0;
}
-static struct shash_alg tgr192 = {
+static struct shash_alg tgr_algs[3] = { {
.digestsize = TGR192_DIGEST_SIZE,
.init = tgr192_init,
.update = tgr192_update,
@@ -640,9 +640,7 @@ static struct shash_alg tgr192 = {
.cra_blocksize = TGR192_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
-};
-
-static struct shash_alg tgr160 = {
+}, {
.digestsize = TGR160_DIGEST_SIZE,
.init = tgr192_init,
.update = tgr192_update,
@@ -654,9 +652,7 @@ static struct shash_alg tgr160 = {
.cra_blocksize = TGR192_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
-};
-
-static struct shash_alg tgr128 = {
+}, {
.digestsize = TGR128_DIGEST_SIZE,
.init = tgr192_init,
.update = tgr192_update,
@@ -668,38 +664,16 @@ static struct shash_alg tgr128 = {
.cra_blocksize = TGR192_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
-};
+} };
static int __init tgr192_mod_init(void)
{
- int ret = 0;
-
- ret = crypto_register_shash(&tgr192);
-
- if (ret < 0) {
- goto out;
- }
-
- ret = crypto_register_shash(&tgr160);
- if (ret < 0) {
- crypto_unregister_shash(&tgr192);
- goto out;
- }
-
- ret = crypto_register_shash(&tgr128);
- if (ret < 0) {
- crypto_unregister_shash(&tgr192);
- crypto_unregister_shash(&tgr160);
- }
- out:
- return ret;
+ return crypto_register_shashes(tgr_algs, ARRAY_SIZE(tgr_algs));
}
static void __exit tgr192_mod_fini(void)
{
- crypto_unregister_shash(&tgr192);
- crypto_unregister_shash(&tgr160);
- crypto_unregister_shash(&tgr128);
+ crypto_unregister_shashes(tgr_algs, ARRAY_SIZE(tgr_algs));
}
MODULE_ALIAS("tgr160");
diff --git a/crypto/twofish_common.c b/crypto/twofish_common.c
index 0af216c75d7..5f62c4f9f6e 100644
--- a/crypto/twofish_common.c
+++ b/crypto/twofish_common.c
@@ -580,12 +580,9 @@ static const u8 calc_sb_tbl[512] = {
ctx->a[(j) + 1] = rol32(y, 9)
/* Perform the key setup. */
-int twofish_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int key_len)
+int __twofish_setkey(struct twofish_ctx *ctx, const u8 *key,
+ unsigned int key_len, u32 *flags)
{
-
- struct twofish_ctx *ctx = crypto_tfm_ctx(tfm);
- u32 *flags = &tfm->crt_flags;
-
int i, j, k;
/* Temporaries for CALC_K. */
@@ -701,7 +698,13 @@ int twofish_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int key_len)
return 0;
}
+EXPORT_SYMBOL_GPL(__twofish_setkey);
+int twofish_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int key_len)
+{
+ return __twofish_setkey(crypto_tfm_ctx(tfm), key, key_len,
+ &tfm->crt_flags);
+}
EXPORT_SYMBOL_GPL(twofish_setkey);
MODULE_LICENSE("GPL");
diff --git a/crypto/twofish.c b/crypto/twofish_generic.c
index dfcda231f87..2d5000552d0 100644
--- a/crypto/twofish.c
+++ b/crypto/twofish_generic.c
@@ -188,7 +188,6 @@ static struct crypto_alg alg = {
.cra_ctxsize = sizeof(struct twofish_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(alg.cra_list),
.cra_u = { .cipher = {
.cia_min_keysize = TF_MIN_KEY_SIZE,
.cia_max_keysize = TF_MAX_KEY_SIZE,
@@ -212,3 +211,4 @@ module_exit(twofish_mod_fini);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION ("Twofish Cipher Algorithm");
+MODULE_ALIAS("twofish");
diff --git a/crypto/vmac.c b/crypto/vmac.c
new file mode 100644
index 00000000000..2eb11a30c29
--- /dev/null
+++ b/crypto/vmac.c
@@ -0,0 +1,715 @@
+/*
+ * Modified to interface to the Linux kernel
+ * Copyright (c) 2009, Intel Corporation.
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc., 59 Temple
+ * Place - Suite 330, Boston, MA 02111-1307 USA.
+ */
+
+/* --------------------------------------------------------------------------
+ * VMAC and VHASH Implementation by Ted Krovetz (tdk@acm.org) and Wei Dai.
+ * This implementation is hereby placed in the public domain.
+ * The authors offer no warranty. Use at your own risk.
+ * Please send bug reports to the authors.
+ * Last modified: 17 APR 08, 1700 PDT
+ * ----------------------------------------------------------------------- */
+
+#include <linux/init.h>
+#include <linux/types.h>
+#include <linux/crypto.h>
+#include <linux/module.h>
+#include <linux/scatterlist.h>
+#include <asm/byteorder.h>
+#include <crypto/scatterwalk.h>
+#include <crypto/vmac.h>
+#include <crypto/internal/hash.h>
+
+/*
+ * Constants and masks
+ */
+#define UINT64_C(x) x##ULL
+static const u64 p64 = UINT64_C(0xfffffffffffffeff); /* 2^64 - 257 prime */
+static const u64 m62 = UINT64_C(0x3fffffffffffffff); /* 62-bit mask */
+static const u64 m63 = UINT64_C(0x7fffffffffffffff); /* 63-bit mask */
+static const u64 m64 = UINT64_C(0xffffffffffffffff); /* 64-bit mask */
+static const u64 mpoly = UINT64_C(0x1fffffff1fffffff); /* Poly key mask */
+
+#define pe64_to_cpup le64_to_cpup /* Prefer little endian */
+
+#ifdef __LITTLE_ENDIAN
+#define INDEX_HIGH 1
+#define INDEX_LOW 0
+#else
+#define INDEX_HIGH 0
+#define INDEX_LOW 1
+#endif
+
+/*
+ * The following routines are used in this implementation. They are
+ * written via macros to simulate zero-overhead call-by-reference.
+ *
+ * MUL64: 64x64->128-bit multiplication
+ * PMUL64: assumes top bits cleared on inputs
+ * ADD128: 128x128->128-bit addition
+ */
+
+#define ADD128(rh, rl, ih, il) \
+ do { \
+ u64 _il = (il); \
+ (rl) += (_il); \
+ if ((rl) < (_il)) \
+ (rh)++; \
+ (rh) += (ih); \
+ } while (0)
+
+#define MUL32(i1, i2) ((u64)(u32)(i1)*(u32)(i2))
+
+#define PMUL64(rh, rl, i1, i2) /* Assumes m doesn't overflow */ \
+ do { \
+ u64 _i1 = (i1), _i2 = (i2); \
+ u64 m = MUL32(_i1, _i2>>32) + MUL32(_i1>>32, _i2); \
+ rh = MUL32(_i1>>32, _i2>>32); \
+ rl = MUL32(_i1, _i2); \
+ ADD128(rh, rl, (m >> 32), (m << 32)); \
+ } while (0)
+
+#define MUL64(rh, rl, i1, i2) \
+ do { \
+ u64 _i1 = (i1), _i2 = (i2); \
+ u64 m1 = MUL32(_i1, _i2>>32); \
+ u64 m2 = MUL32(_i1>>32, _i2); \
+ rh = MUL32(_i1>>32, _i2>>32); \
+ rl = MUL32(_i1, _i2); \
+ ADD128(rh, rl, (m1 >> 32), (m1 << 32)); \
+ ADD128(rh, rl, (m2 >> 32), (m2 << 32)); \
+ } while (0)
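
A quick usage illustration of the call-by-reference style above (operand names and values here are arbitrary, not taken from the code):

	u64 hi, lo;			/* receives the 128-bit product */
	u64 a = 0x0123456789abcdefULL;
	u64 b = 0xfedcba9876543210ULL;
	u64 ch = 0, cl = 1;

	MUL64(hi, lo, a, b);		/* (hi:lo) = a * b, full 64x64 -> 128-bit */
	ADD128(hi, lo, ch, cl);		/* (hi:lo) += (ch:cl) with carry propagation */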
+
+/*
+ * For highest performance the L1 NH and L2 polynomial hashes should be
+ * carefully implemented to take advantage of one's target architecture.
+ * Here these two hash functions are defined multiple times; once for
+ * 64-bit architectures, once for 32-bit SSE2 architectures, and once
+ * for the remaining (32-bit) architectures.
+ * For each, nh_16 *must* be defined (works on multiples of 16 bytes).
+ * Optionally, nh_vmac_nhbytes can be defined (for multiples of
+ * VMAC_NHBYTES), and nh_16_2 and nh_vmac_nhbytes_2 (versions that do two
+ * NH computations at once).
+ */
+
+#ifdef CONFIG_64BIT
+
+#define nh_16(mp, kp, nw, rh, rl) \
+ do { \
+ int i; u64 th, tl; \
+ rh = rl = 0; \
+ for (i = 0; i < nw; i += 2) { \
+ MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i], \
+ pe64_to_cpup((mp)+i+1)+(kp)[i+1]); \
+ ADD128(rh, rl, th, tl); \
+ } \
+ } while (0)
+
+#define nh_16_2(mp, kp, nw, rh, rl, rh1, rl1) \
+ do { \
+ int i; u64 th, tl; \
+ rh1 = rl1 = rh = rl = 0; \
+ for (i = 0; i < nw; i += 2) { \
+ MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i], \
+ pe64_to_cpup((mp)+i+1)+(kp)[i+1]); \
+ ADD128(rh, rl, th, tl); \
+ MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i+2], \
+ pe64_to_cpup((mp)+i+1)+(kp)[i+3]); \
+ ADD128(rh1, rl1, th, tl); \
+ } \
+ } while (0)
+
+#if (VMAC_NHBYTES >= 64) /* These versions do 64-bytes of message at a time */
+#define nh_vmac_nhbytes(mp, kp, nw, rh, rl) \
+ do { \
+ int i; u64 th, tl; \
+ rh = rl = 0; \
+ for (i = 0; i < nw; i += 8) { \
+ MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i], \
+ pe64_to_cpup((mp)+i+1)+(kp)[i+1]); \
+ ADD128(rh, rl, th, tl); \
+ MUL64(th, tl, pe64_to_cpup((mp)+i+2)+(kp)[i+2], \
+ pe64_to_cpup((mp)+i+3)+(kp)[i+3]); \
+ ADD128(rh, rl, th, tl); \
+ MUL64(th, tl, pe64_to_cpup((mp)+i+4)+(kp)[i+4], \
+ pe64_to_cpup((mp)+i+5)+(kp)[i+5]); \
+ ADD128(rh, rl, th, tl); \
+ MUL64(th, tl, pe64_to_cpup((mp)+i+6)+(kp)[i+6], \
+ pe64_to_cpup((mp)+i+7)+(kp)[i+7]); \
+ ADD128(rh, rl, th, tl); \
+ } \
+ } while (0)
+
+#define nh_vmac_nhbytes_2(mp, kp, nw, rh, rl, rh1, rl1) \
+ do { \
+ int i; u64 th, tl; \
+ rh1 = rl1 = rh = rl = 0; \
+ for (i = 0; i < nw; i += 8) { \
+ MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i], \
+ pe64_to_cpup((mp)+i+1)+(kp)[i+1]); \
+ ADD128(rh, rl, th, tl); \
+ MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i+2], \
+ pe64_to_cpup((mp)+i+1)+(kp)[i+3]); \
+ ADD128(rh1, rl1, th, tl); \
+ MUL64(th, tl, pe64_to_cpup((mp)+i+2)+(kp)[i+2], \
+ pe64_to_cpup((mp)+i+3)+(kp)[i+3]); \
+ ADD128(rh, rl, th, tl); \
+ MUL64(th, tl, pe64_to_cpup((mp)+i+2)+(kp)[i+4], \
+ pe64_to_cpup((mp)+i+3)+(kp)[i+5]); \
+ ADD128(rh1, rl1, th, tl); \
+ MUL64(th, tl, pe64_to_cpup((mp)+i+4)+(kp)[i+4], \
+ pe64_to_cpup((mp)+i+5)+(kp)[i+5]); \
+ ADD128(rh, rl, th, tl); \
+ MUL64(th, tl, pe64_to_cpup((mp)+i+4)+(kp)[i+6], \
+ pe64_to_cpup((mp)+i+5)+(kp)[i+7]); \
+ ADD128(rh1, rl1, th, tl); \
+ MUL64(th, tl, pe64_to_cpup((mp)+i+6)+(kp)[i+6], \
+ pe64_to_cpup((mp)+i+7)+(kp)[i+7]); \
+ ADD128(rh, rl, th, tl); \
+ MUL64(th, tl, pe64_to_cpup((mp)+i+6)+(kp)[i+8], \
+ pe64_to_cpup((mp)+i+7)+(kp)[i+9]); \
+ ADD128(rh1, rl1, th, tl); \
+ } \
+ } while (0)
+#endif
+
+#define poly_step(ah, al, kh, kl, mh, ml) \
+ do { \
+ u64 t1h, t1l, t2h, t2l, t3h, t3l, z = 0; \
+ /* compute ab*cd, put bd into result registers */ \
+ PMUL64(t3h, t3l, al, kh); \
+ PMUL64(t2h, t2l, ah, kl); \
+ PMUL64(t1h, t1l, ah, 2*kh); \
+ PMUL64(ah, al, al, kl); \
+ /* add 2 * ac to result */ \
+ ADD128(ah, al, t1h, t1l); \
+ /* add together ad + bc */ \
+ ADD128(t2h, t2l, t3h, t3l); \
+ /* now (ah,al), (t2l,2*t2h) need summing */ \
+ /* first add the high registers, carrying into t2h */ \
+ ADD128(t2h, ah, z, t2l); \
+ /* double t2h and add top bit of ah */ \
+ t2h = 2 * t2h + (ah >> 63); \
+ ah &= m63; \
+ /* now add the low registers */ \
+ ADD128(ah, al, mh, ml); \
+ ADD128(ah, al, z, t2h); \
+ } while (0)
+
+#else /* ! CONFIG_64BIT */
+
+#ifndef nh_16
+#define nh_16(mp, kp, nw, rh, rl) \
+ do { \
+ u64 t1, t2, m1, m2, t; \
+ int i; \
+ rh = rl = t = 0; \
+ for (i = 0; i < nw; i += 2) { \
+ t1 = pe64_to_cpup(mp+i) + kp[i]; \
+ t2 = pe64_to_cpup(mp+i+1) + kp[i+1]; \
+ m2 = MUL32(t1 >> 32, t2); \
+ m1 = MUL32(t1, t2 >> 32); \
+ ADD128(rh, rl, MUL32(t1 >> 32, t2 >> 32), \
+ MUL32(t1, t2)); \
+ rh += (u64)(u32)(m1 >> 32) \
+ + (u32)(m2 >> 32); \
+ t += (u64)(u32)m1 + (u32)m2; \
+ } \
+ ADD128(rh, rl, (t >> 32), (t << 32)); \
+ } while (0)
+#endif
+
+static void poly_step_func(u64 *ahi, u64 *alo,
+ const u64 *kh, const u64 *kl,
+ const u64 *mh, const u64 *ml)
+{
+#define a0 (*(((u32 *)alo)+INDEX_LOW))
+#define a1 (*(((u32 *)alo)+INDEX_HIGH))
+#define a2 (*(((u32 *)ahi)+INDEX_LOW))
+#define a3 (*(((u32 *)ahi)+INDEX_HIGH))
+#define k0 (*(((u32 *)kl)+INDEX_LOW))
+#define k1 (*(((u32 *)kl)+INDEX_HIGH))
+#define k2 (*(((u32 *)kh)+INDEX_LOW))
+#define k3 (*(((u32 *)kh)+INDEX_HIGH))
+
+ u64 p, q, t;
+ u32 t2;
+
+ p = MUL32(a3, k3);
+ p += p;
+ p += *(u64 *)mh;
+ p += MUL32(a0, k2);
+ p += MUL32(a1, k1);
+ p += MUL32(a2, k0);
+ t = (u32)(p);
+ p >>= 32;
+ p += MUL32(a0, k3);
+ p += MUL32(a1, k2);
+ p += MUL32(a2, k1);
+ p += MUL32(a3, k0);
+ t |= ((u64)((u32)p & 0x7fffffff)) << 32;
+ p >>= 31;
+ p += (u64)(((u32 *)ml)[INDEX_LOW]);
+ p += MUL32(a0, k0);
+ q = MUL32(a1, k3);
+ q += MUL32(a2, k2);
+ q += MUL32(a3, k1);
+ q += q;
+ p += q;
+ t2 = (u32)(p);
+ p >>= 32;
+ p += (u64)(((u32 *)ml)[INDEX_HIGH]);
+ p += MUL32(a0, k1);
+ p += MUL32(a1, k0);
+ q = MUL32(a2, k3);
+ q += MUL32(a3, k2);
+ q += q;
+ p += q;
+ *(u64 *)(alo) = (p << 32) | t2;
+ p >>= 32;
+ *(u64 *)(ahi) = p + t;
+
+#undef a0
+#undef a1
+#undef a2
+#undef a3
+#undef k0
+#undef k1
+#undef k2
+#undef k3
+}
+
+#define poly_step(ah, al, kh, kl, mh, ml) \
+ poly_step_func(&(ah), &(al), &(kh), &(kl), &(mh), &(ml))
+
+#endif /* end of specialized NH and poly definitions */
+
+/* At least nh_16 is defined. Define others as needed here */
+#ifndef nh_16_2
+#define nh_16_2(mp, kp, nw, rh, rl, rh2, rl2) \
+ do { \
+ nh_16(mp, kp, nw, rh, rl); \
+ nh_16(mp, ((kp)+2), nw, rh2, rl2); \
+ } while (0)
+#endif
+#ifndef nh_vmac_nhbytes
+#define nh_vmac_nhbytes(mp, kp, nw, rh, rl) \
+ nh_16(mp, kp, nw, rh, rl)
+#endif
+#ifndef nh_vmac_nhbytes_2
+#define nh_vmac_nhbytes_2(mp, kp, nw, rh, rl, rh2, rl2) \
+ do { \
+ nh_vmac_nhbytes(mp, kp, nw, rh, rl); \
+ nh_vmac_nhbytes(mp, ((kp)+2), nw, rh2, rl2); \
+ } while (0)
+#endif
+
+static void vhash_abort(struct vmac_ctx *ctx)
+{
+ ctx->polytmp[0] = ctx->polykey[0] ;
+ ctx->polytmp[1] = ctx->polykey[1] ;
+ ctx->first_block_processed = 0;
+}
+
+static u64 l3hash(u64 p1, u64 p2, u64 k1, u64 k2, u64 len)
+{
+ u64 rh, rl, t, z = 0;
+
+ /* fully reduce (p1,p2)+(len,0) mod p127 */
+ t = p1 >> 63;
+ p1 &= m63;
+ ADD128(p1, p2, len, t);
+ /* At this point, (p1,p2) is at most 2^127+(len<<64) */
+ t = (p1 > m63) + ((p1 == m63) && (p2 == m64));
+ ADD128(p1, p2, z, t);
+ p1 &= m63;
+
+ /* compute (p1,p2)/(2^64-2^32) and (p1,p2)%(2^64-2^32) */
+ t = p1 + (p2 >> 32);
+ t += (t >> 32);
+ t += (u32)t > 0xfffffffeu;
+ p1 += (t >> 32);
+ p2 += (p1 << 32);
+
+ /* compute (p1+k1)%p64 and (p2+k2)%p64 */
+ p1 += k1;
+ p1 += (0 - (p1 < k1)) & 257;
+ p2 += k2;
+ p2 += (0 - (p2 < k2)) & 257;
+
+ /* compute (p1+k1)*(p2+k2)%p64 */
+ MUL64(rh, rl, p1, p2);
+ t = rh >> 56;
+ ADD128(t, rl, z, rh);
+ rh <<= 8;
+ ADD128(t, rl, z, rh);
+ t += t << 8;
+ rl += t;
+ rl += (0 - (rl < t)) & 257;
+ rl += (0 - (rl > p64-1)) & 257;
+ return rl;
+}
+
+static void vhash_update(const unsigned char *m,
+ unsigned int mbytes, /* Positive multiple of VMAC_NHBYTES */
+ struct vmac_ctx *ctx)
+{
+ u64 rh, rl, *mptr;
+ const u64 *kptr = (u64 *)ctx->nhkey;
+ int i;
+ u64 ch, cl;
+ u64 pkh = ctx->polykey[0];
+ u64 pkl = ctx->polykey[1];
+
+ if (!mbytes)
+ return;
+
+ BUG_ON(mbytes % VMAC_NHBYTES);
+
+ mptr = (u64 *)m;
+ i = mbytes / VMAC_NHBYTES; /* Must be non-zero */
+
+ ch = ctx->polytmp[0];
+ cl = ctx->polytmp[1];
+
+ if (!ctx->first_block_processed) {
+ ctx->first_block_processed = 1;
+ nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, rh, rl);
+ rh &= m62;
+ ADD128(ch, cl, rh, rl);
+ mptr += (VMAC_NHBYTES/sizeof(u64));
+ i--;
+ }
+
+ while (i--) {
+ nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, rh, rl);
+ rh &= m62;
+ poly_step(ch, cl, pkh, pkl, rh, rl);
+ mptr += (VMAC_NHBYTES/sizeof(u64));
+ }
+
+ ctx->polytmp[0] = ch;
+ ctx->polytmp[1] = cl;
+}
+
+static u64 vhash(unsigned char m[], unsigned int mbytes,
+ u64 *tagl, struct vmac_ctx *ctx)
+{
+ u64 rh, rl, *mptr;
+ const u64 *kptr = (u64 *)ctx->nhkey;
+ int i, remaining;
+ u64 ch, cl;
+ u64 pkh = ctx->polykey[0];
+ u64 pkl = ctx->polykey[1];
+
+ mptr = (u64 *)m;
+ i = mbytes / VMAC_NHBYTES;
+ remaining = mbytes % VMAC_NHBYTES;
+
+ if (ctx->first_block_processed) {
+ ch = ctx->polytmp[0];
+ cl = ctx->polytmp[1];
+ } else if (i) {
+ nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, ch, cl);
+ ch &= m62;
+ ADD128(ch, cl, pkh, pkl);
+ mptr += (VMAC_NHBYTES/sizeof(u64));
+ i--;
+ } else if (remaining) {
+ nh_16(mptr, kptr, 2*((remaining+15)/16), ch, cl);
+ ch &= m62;
+ ADD128(ch, cl, pkh, pkl);
+ mptr += (VMAC_NHBYTES/sizeof(u64));
+ goto do_l3;
+ } else {/* Empty String */
+ ch = pkh; cl = pkl;
+ goto do_l3;
+ }
+
+ while (i--) {
+ nh_vmac_nhbytes(mptr, kptr, VMAC_NHBYTES/8, rh, rl);
+ rh &= m62;
+ poly_step(ch, cl, pkh, pkl, rh, rl);
+ mptr += (VMAC_NHBYTES/sizeof(u64));
+ }
+ if (remaining) {
+ nh_16(mptr, kptr, 2*((remaining+15)/16), rh, rl);
+ rh &= m62;
+ poly_step(ch, cl, pkh, pkl, rh, rl);
+ }
+
+do_l3:
+ vhash_abort(ctx);
+ remaining *= 8;
+ return l3hash(ch, cl, ctx->l3key[0], ctx->l3key[1], remaining);
+}
+
+static u64 vmac(unsigned char m[], unsigned int mbytes,
+ const unsigned char n[16], u64 *tagl,
+ struct vmac_ctx_t *ctx)
+{
+ u64 *in_n, *out_p;
+ u64 p, h;
+ int i;
+
+ in_n = ctx->__vmac_ctx.cached_nonce;
+ out_p = ctx->__vmac_ctx.cached_aes;
+
+ i = n[15] & 1;
+ if ((*(u64 *)(n+8) != in_n[1]) || (*(u64 *)(n) != in_n[0])) {
+ in_n[0] = *(u64 *)(n);
+ in_n[1] = *(u64 *)(n+8);
+ ((unsigned char *)in_n)[15] &= 0xFE;
+ crypto_cipher_encrypt_one(ctx->child,
+ (unsigned char *)out_p, (unsigned char *)in_n);
+
+ ((unsigned char *)in_n)[15] |= (unsigned char)(1-i);
+ }
+ p = be64_to_cpup(out_p + i);
+ h = vhash(m, mbytes, (u64 *)0, &ctx->__vmac_ctx);
+ return le64_to_cpu(p + h);
+}
+
+static int vmac_set_key(unsigned char user_key[], struct vmac_ctx_t *ctx)
+{
+ u64 in[2] = {0}, out[2];
+ unsigned i;
+ int err = 0;
+
+ err = crypto_cipher_setkey(ctx->child, user_key, VMAC_KEY_LEN);
+ if (err)
+ return err;
+
+ /* Fill nh key */
+ ((unsigned char *)in)[0] = 0x80;
+ for (i = 0; i < sizeof(ctx->__vmac_ctx.nhkey)/8; i += 2) {
+ crypto_cipher_encrypt_one(ctx->child,
+ (unsigned char *)out, (unsigned char *)in);
+ ctx->__vmac_ctx.nhkey[i] = be64_to_cpup(out);
+ ctx->__vmac_ctx.nhkey[i+1] = be64_to_cpup(out+1);
+ ((unsigned char *)in)[15] += 1;
+ }
+
+ /* Fill poly key */
+ ((unsigned char *)in)[0] = 0xC0;
+ in[1] = 0;
+ for (i = 0; i < sizeof(ctx->__vmac_ctx.polykey)/8; i += 2) {
+ crypto_cipher_encrypt_one(ctx->child,
+ (unsigned char *)out, (unsigned char *)in);
+ ctx->__vmac_ctx.polytmp[i] =
+ ctx->__vmac_ctx.polykey[i] =
+ be64_to_cpup(out) & mpoly;
+ ctx->__vmac_ctx.polytmp[i+1] =
+ ctx->__vmac_ctx.polykey[i+1] =
+ be64_to_cpup(out+1) & mpoly;
+ ((unsigned char *)in)[15] += 1;
+ }
+
+ /* Fill ip key */
+ ((unsigned char *)in)[0] = 0xE0;
+ in[1] = 0;
+ for (i = 0; i < sizeof(ctx->__vmac_ctx.l3key)/8; i += 2) {
+ do {
+ crypto_cipher_encrypt_one(ctx->child,
+ (unsigned char *)out, (unsigned char *)in);
+ ctx->__vmac_ctx.l3key[i] = be64_to_cpup(out);
+ ctx->__vmac_ctx.l3key[i+1] = be64_to_cpup(out+1);
+ ((unsigned char *)in)[15] += 1;
+ } while (ctx->__vmac_ctx.l3key[i] >= p64
+ || ctx->__vmac_ctx.l3key[i+1] >= p64);
+ }
+
+ /* Invalidate nonce/aes cache and reset other elements */
+ ctx->__vmac_ctx.cached_nonce[0] = (u64)-1; /* Ensure illegal nonce */
+ ctx->__vmac_ctx.cached_nonce[1] = (u64)0; /* Ensure illegal nonce */
+ ctx->__vmac_ctx.first_block_processed = 0;
+
+ return err;
+}
+
+static int vmac_setkey(struct crypto_shash *parent,
+ const u8 *key, unsigned int keylen)
+{
+ struct vmac_ctx_t *ctx = crypto_shash_ctx(parent);
+
+ if (keylen != VMAC_KEY_LEN) {
+ crypto_shash_set_flags(parent, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ return -EINVAL;
+ }
+
+ return vmac_set_key((u8 *)key, ctx);
+}
+
+static int vmac_init(struct shash_desc *pdesc)
+{
+ return 0;
+}
+
+static int vmac_update(struct shash_desc *pdesc, const u8 *p,
+ unsigned int len)
+{
+ struct crypto_shash *parent = pdesc->tfm;
+ struct vmac_ctx_t *ctx = crypto_shash_ctx(parent);
+ int expand;
+ int min;
+
+ expand = VMAC_NHBYTES - ctx->partial_size > 0 ?
+ VMAC_NHBYTES - ctx->partial_size : 0;
+
+ min = len < expand ? len : expand;
+
+ memcpy(ctx->partial + ctx->partial_size, p, min);
+ ctx->partial_size += min;
+
+ if (len < expand)
+ return 0;
+
+ vhash_update(ctx->partial, VMAC_NHBYTES, &ctx->__vmac_ctx);
+ ctx->partial_size = 0;
+
+ len -= expand;
+ p += expand;
+
+ if (len % VMAC_NHBYTES) {
+ memcpy(ctx->partial, p + len - (len % VMAC_NHBYTES),
+ len % VMAC_NHBYTES);
+ ctx->partial_size = len % VMAC_NHBYTES;
+ }
+
+ vhash_update(p, len - len % VMAC_NHBYTES, &ctx->__vmac_ctx);
+
+ return 0;
+}
+
+static int vmac_final(struct shash_desc *pdesc, u8 *out)
+{
+ struct crypto_shash *parent = pdesc->tfm;
+ struct vmac_ctx_t *ctx = crypto_shash_ctx(parent);
+ vmac_t mac;
+ u8 nonce[16] = {};
+
+ /* vmac() ends up accessing outside the array bounds that
+ * we specify. It appears to access up to the next 2-word
+ * boundary. We'll just be uber cautious and zero the
+ * unwritten bytes in the buffer.
+ */
+ if (ctx->partial_size) {
+ memset(ctx->partial + ctx->partial_size, 0,
+ VMAC_NHBYTES - ctx->partial_size);
+ }
+ mac = vmac(ctx->partial, ctx->partial_size, nonce, NULL, ctx);
+ memcpy(out, &mac, sizeof(vmac_t));
+ memset(&mac, 0, sizeof(vmac_t));
+ memset(&ctx->__vmac_ctx, 0, sizeof(struct vmac_ctx));
+ ctx->partial_size = 0;
+ return 0;
+}
+
+static int vmac_init_tfm(struct crypto_tfm *tfm)
+{
+ struct crypto_cipher *cipher;
+ struct crypto_instance *inst = (void *)tfm->__crt_alg;
+ struct crypto_spawn *spawn = crypto_instance_ctx(inst);
+ struct vmac_ctx_t *ctx = crypto_tfm_ctx(tfm);
+
+ cipher = crypto_spawn_cipher(spawn);
+ if (IS_ERR(cipher))
+ return PTR_ERR(cipher);
+
+ ctx->child = cipher;
+ return 0;
+}
+
+static void vmac_exit_tfm(struct crypto_tfm *tfm)
+{
+ struct vmac_ctx_t *ctx = crypto_tfm_ctx(tfm);
+ crypto_free_cipher(ctx->child);
+}
+
+static int vmac_create(struct crypto_template *tmpl, struct rtattr **tb)
+{
+ struct shash_instance *inst;
+ struct crypto_alg *alg;
+ int err;
+
+ err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
+ if (err)
+ return err;
+
+ alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
+ CRYPTO_ALG_TYPE_MASK);
+ if (IS_ERR(alg))
+ return PTR_ERR(alg);
+
+ inst = shash_alloc_instance("vmac", alg);
+ err = PTR_ERR(inst);
+ if (IS_ERR(inst))
+ goto out_put_alg;
+
+ err = crypto_init_spawn(shash_instance_ctx(inst), alg,
+ shash_crypto_instance(inst),
+ CRYPTO_ALG_TYPE_MASK);
+ if (err)
+ goto out_free_inst;
+
+ inst->alg.base.cra_priority = alg->cra_priority;
+ inst->alg.base.cra_blocksize = alg->cra_blocksize;
+ inst->alg.base.cra_alignmask = alg->cra_alignmask;
+
+ inst->alg.digestsize = sizeof(vmac_t);
+ inst->alg.base.cra_ctxsize = sizeof(struct vmac_ctx_t);
+ inst->alg.base.cra_init = vmac_init_tfm;
+ inst->alg.base.cra_exit = vmac_exit_tfm;
+
+ inst->alg.init = vmac_init;
+ inst->alg.update = vmac_update;
+ inst->alg.final = vmac_final;
+ inst->alg.setkey = vmac_setkey;
+
+ err = shash_register_instance(tmpl, inst);
+ if (err) {
+out_free_inst:
+ shash_free_instance(shash_crypto_instance(inst));
+ }
+
+out_put_alg:
+ crypto_mod_put(alg);
+ return err;
+}
+
+static struct crypto_template vmac_tmpl = {
+ .name = "vmac",
+ .create = vmac_create,
+ .free = shash_free_instance,
+ .module = THIS_MODULE,
+};
+
+static int __init vmac_module_init(void)
+{
+ return crypto_register_template(&vmac_tmpl);
+}
+
+static void __exit vmac_module_exit(void)
+{
+ crypto_unregister_template(&vmac_tmpl);
+}
+
+module_init(vmac_module_init);
+module_exit(vmac_module_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("VMAC hash algorithm");
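
Since the template registers under the name "vmac" and is instantiated over a block cipher, a caller would typically request an instance such as "vmac(aes)" through the shash API. The sketch below is a hedged illustration of keying and one-shot digesting; vmac_aes_digest_example is a made-up helper name, error handling is kept minimal, and it assumes <crypto/hash.h>, <crypto/vmac.h> and <linux/slab.h> are available.

	#include <crypto/hash.h>
	#include <crypto/vmac.h>
	#include <linux/slab.h>

	static int vmac_aes_digest_example(const u8 *key, const u8 *data,
					   unsigned int len, u8 *tag)
	{
		struct crypto_shash *tfm;
		struct shash_desc *desc;
		int err;

		tfm = crypto_alloc_shash("vmac(aes)", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		err = crypto_shash_setkey(tfm, key, VMAC_KEY_LEN);
		if (err)
			goto out_free_tfm;

		/* kzalloc keeps the descriptor (including any flags field) zeroed */
		desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
		if (!desc) {
			err = -ENOMEM;
			goto out_free_tfm;
		}
		desc->tfm = tfm;

		err = crypto_shash_digest(desc, data, len, tag);	/* writes sizeof(vmac_t) bytes */

		kfree(desc);
	out_free_tfm:
		crypto_free_shash(tfm);
		return err;
	}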
diff --git a/crypto/wp512.c b/crypto/wp512.c
index 72342727368..180f1d6e03f 100644
--- a/crypto/wp512.c
+++ b/crypto/wp512.c
@@ -762,11 +762,17 @@ static const u64 C7[256] = {
0x86228644a411c286ULL,
};
-static const u64 rc[WHIRLPOOL_ROUNDS + 1] = {
- 0x0000000000000000ULL, 0x1823c6e887b8014fULL, 0x36a6d2f5796f9152ULL,
- 0x60bc9b8ea30c7b35ULL, 0x1de0d7c22e4bfe57ULL, 0x157737e59ff04adaULL,
- 0x58c9290ab1a06b85ULL, 0xbd5d10f4cb3e0567ULL, 0xe427418ba77d95d8ULL,
- 0xfbee7c66dd17479eULL, 0xca2dbf07ad5a8333ULL,
+static const u64 rc[WHIRLPOOL_ROUNDS] = {
+ 0x1823c6e887b8014fULL,
+ 0x36a6d2f5796f9152ULL,
+ 0x60bc9b8ea30c7b35ULL,
+ 0x1de0d7c22e4bfe57ULL,
+ 0x157737e59ff04adaULL,
+ 0x58c9290ab1a06b85ULL,
+ 0xbd5d10f4cb3e0567ULL,
+ 0xe427418ba77d95d8ULL,
+ 0xfbee7c66dd17479eULL,
+ 0xca2dbf07ad5a8333ULL,
};
/**
@@ -793,7 +799,7 @@ static void wp512_process_buffer(struct wp512_ctx *wctx) {
state[6] = block[6] ^ (K[6] = wctx->hash[6]);
state[7] = block[7] ^ (K[7] = wctx->hash[7]);
- for (r = 1; r <= WHIRLPOOL_ROUNDS; r++) {
+ for (r = 0; r < WHIRLPOOL_ROUNDS; r++) {
L[0] = C0[(int)(K[0] >> 56) ] ^
C1[(int)(K[7] >> 48) & 0xff] ^
@@ -1113,7 +1119,7 @@ static int wp256_final(struct shash_desc *desc, u8 *out)
return 0;
}
-static struct shash_alg wp512 = {
+static struct shash_alg wp_algs[3] = { {
.digestsize = WP512_DIGEST_SIZE,
.init = wp512_init,
.update = wp512_update,
@@ -1125,9 +1131,7 @@ static struct shash_alg wp512 = {
.cra_blocksize = WP512_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
-};
-
-static struct shash_alg wp384 = {
+}, {
.digestsize = WP384_DIGEST_SIZE,
.init = wp512_init,
.update = wp512_update,
@@ -1139,9 +1143,7 @@ static struct shash_alg wp384 = {
.cra_blocksize = WP512_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
-};
-
-static struct shash_alg wp256 = {
+}, {
.digestsize = WP256_DIGEST_SIZE,
.init = wp512_init,
.update = wp512_update,
@@ -1153,39 +1155,16 @@ static struct shash_alg wp256 = {
.cra_blocksize = WP512_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
-};
+} };
static int __init wp512_mod_init(void)
{
- int ret = 0;
-
- ret = crypto_register_shash(&wp512);
-
- if (ret < 0)
- goto out;
-
- ret = crypto_register_shash(&wp384);
- if (ret < 0)
- {
- crypto_unregister_shash(&wp512);
- goto out;
- }
-
- ret = crypto_register_shash(&wp256);
- if (ret < 0)
- {
- crypto_unregister_shash(&wp512);
- crypto_unregister_shash(&wp384);
- }
-out:
- return ret;
+ return crypto_register_shashes(wp_algs, ARRAY_SIZE(wp_algs));
}
static void __exit wp512_mod_fini(void)
{
- crypto_unregister_shash(&wp512);
- crypto_unregister_shash(&wp384);
- crypto_unregister_shash(&wp256);
+ crypto_unregister_shashes(wp_algs, ARRAY_SIZE(wp_algs));
}
MODULE_ALIAS("wp384");
diff --git a/crypto/xcbc.c b/crypto/xcbc.c
index b63b633e549..a5fbdf3738c 100644
--- a/crypto/xcbc.c
+++ b/crypto/xcbc.c
@@ -19,211 +19,143 @@
* Kazunori Miyazawa <miyazawa@linux-ipv6.org>
*/
-#include <crypto/scatterwalk.h>
-#include <linux/crypto.h>
+#include <crypto/internal/hash.h>
#include <linux/err.h>
-#include <linux/hardirq.h>
#include <linux/kernel.h>
-#include <linux/mm.h>
-#include <linux/rtnetlink.h>
-#include <linux/slab.h>
-#include <linux/scatterlist.h>
+#include <linux/module.h>
static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
0x02020202, 0x02020202, 0x02020202, 0x02020202,
0x03030303, 0x03030303, 0x03030303, 0x03030303};
+
/*
* +------------------------
* | <parent tfm>
* +------------------------
- * | crypto_xcbc_ctx
+ * | xcbc_tfm_ctx
* +------------------------
- * | odds (block size)
+ * | consts (block size * 2)
* +------------------------
- * | prev (block size)
+ */
+struct xcbc_tfm_ctx {
+ struct crypto_cipher *child;
+ u8 ctx[];
+};
+
+/*
* +------------------------
- * | key (block size)
+ * | <shash desc>
* +------------------------
- * | consts (block size * 3)
+ * | xcbc_desc_ctx
+ * +------------------------
+ * | odds (block size)
+ * +------------------------
+ * | prev (block size)
* +------------------------
*/
-struct crypto_xcbc_ctx {
- struct crypto_cipher *child;
- u8 *odds;
- u8 *prev;
- u8 *key;
- u8 *consts;
- void (*xor)(u8 *a, const u8 *b, unsigned int bs);
- unsigned int keylen;
+struct xcbc_desc_ctx {
unsigned int len;
+ u8 ctx[];
};
-static void xor_128(u8 *a, const u8 *b, unsigned int bs)
-{
- ((u32 *)a)[0] ^= ((u32 *)b)[0];
- ((u32 *)a)[1] ^= ((u32 *)b)[1];
- ((u32 *)a)[2] ^= ((u32 *)b)[2];
- ((u32 *)a)[3] ^= ((u32 *)b)[3];
-}
-
-static int _crypto_xcbc_digest_setkey(struct crypto_hash *parent,
- struct crypto_xcbc_ctx *ctx)
+static int crypto_xcbc_digest_setkey(struct crypto_shash *parent,
+ const u8 *inkey, unsigned int keylen)
{
- int bs = crypto_hash_blocksize(parent);
+ unsigned long alignmask = crypto_shash_alignmask(parent);
+ struct xcbc_tfm_ctx *ctx = crypto_shash_ctx(parent);
+ int bs = crypto_shash_blocksize(parent);
+ u8 *consts = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
int err = 0;
u8 key1[bs];
- if ((err = crypto_cipher_setkey(ctx->child, ctx->key, ctx->keylen)))
- return err;
+ if ((err = crypto_cipher_setkey(ctx->child, inkey, keylen)))
+ return err;
- crypto_cipher_encrypt_one(ctx->child, key1, ctx->consts);
+ crypto_cipher_encrypt_one(ctx->child, consts, (u8 *)ks + bs);
+ crypto_cipher_encrypt_one(ctx->child, consts + bs, (u8 *)ks + bs * 2);
+ crypto_cipher_encrypt_one(ctx->child, key1, (u8 *)ks);
return crypto_cipher_setkey(ctx->child, key1, bs);
-}
-
-static int crypto_xcbc_digest_setkey(struct crypto_hash *parent,
- const u8 *inkey, unsigned int keylen)
-{
- struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
-
- if (keylen != crypto_cipher_blocksize(ctx->child))
- return -EINVAL;
- ctx->keylen = keylen;
- memcpy(ctx->key, inkey, keylen);
- ctx->consts = (u8*)ks;
-
- return _crypto_xcbc_digest_setkey(parent, ctx);
}
-static int crypto_xcbc_digest_init(struct hash_desc *pdesc)
+static int crypto_xcbc_digest_init(struct shash_desc *pdesc)
{
- struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(pdesc->tfm);
- int bs = crypto_hash_blocksize(pdesc->tfm);
+ unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm);
+ struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
+ int bs = crypto_shash_blocksize(pdesc->tfm);
+ u8 *prev = PTR_ALIGN(&ctx->ctx[0], alignmask + 1) + bs;
ctx->len = 0;
- memset(ctx->odds, 0, bs);
- memset(ctx->prev, 0, bs);
+ memset(prev, 0, bs);
return 0;
}
-static int crypto_xcbc_digest_update2(struct hash_desc *pdesc,
- struct scatterlist *sg,
- unsigned int nbytes)
+static int crypto_xcbc_digest_update(struct shash_desc *pdesc, const u8 *p,
+ unsigned int len)
{
- struct crypto_hash *parent = pdesc->tfm;
- struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
- struct crypto_cipher *tfm = ctx->child;
- int bs = crypto_hash_blocksize(parent);
-
- for (;;) {
- struct page *pg = sg_page(sg);
- unsigned int offset = sg->offset;
- unsigned int slen = sg->length;
-
- if (unlikely(slen > nbytes))
- slen = nbytes;
-
- nbytes -= slen;
-
- while (slen > 0) {
- unsigned int len = min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
- char *p = crypto_kmap(pg, 0) + offset;
-
- /* checking the data can fill the block */
- if ((ctx->len + len) <= bs) {
- memcpy(ctx->odds + ctx->len, p, len);
- ctx->len += len;
- slen -= len;
-
- /* checking the rest of the page */
- if (len + offset >= PAGE_SIZE) {
- offset = 0;
- pg++;
- } else
- offset += len;
-
- crypto_kunmap(p, 0);
- crypto_yield(pdesc->flags);
- continue;
- }
-
- /* filling odds with new data and encrypting it */
- memcpy(ctx->odds + ctx->len, p, bs - ctx->len);
- len -= bs - ctx->len;
- p += bs - ctx->len;
-
- ctx->xor(ctx->prev, ctx->odds, bs);
- crypto_cipher_encrypt_one(tfm, ctx->prev, ctx->prev);
-
- /* clearing the length */
- ctx->len = 0;
-
- /* encrypting the rest of data */
- while (len > bs) {
- ctx->xor(ctx->prev, p, bs);
- crypto_cipher_encrypt_one(tfm, ctx->prev,
- ctx->prev);
- p += bs;
- len -= bs;
- }
-
- /* keeping the surplus of blocksize */
- if (len) {
- memcpy(ctx->odds, p, len);
- ctx->len = len;
- }
- crypto_kunmap(p, 0);
- crypto_yield(pdesc->flags);
- slen -= min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
- offset = 0;
- pg++;
- }
-
- if (!nbytes)
- break;
- sg = scatterwalk_sg_next(sg);
+ struct crypto_shash *parent = pdesc->tfm;
+ unsigned long alignmask = crypto_shash_alignmask(parent);
+ struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent);
+ struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
+ struct crypto_cipher *tfm = tctx->child;
+ int bs = crypto_shash_blocksize(parent);
+ u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
+ u8 *prev = odds + bs;
+
+ /* checking whether the data can fill the block */
+ if ((ctx->len + len) <= bs) {
+ memcpy(odds + ctx->len, p, len);
+ ctx->len += len;
+ return 0;
}
- return 0;
-}
+ /* filling odds with new data and encrypting it */
+ memcpy(odds + ctx->len, p, bs - ctx->len);
+ len -= bs - ctx->len;
+ p += bs - ctx->len;
-static int crypto_xcbc_digest_update(struct hash_desc *pdesc,
- struct scatterlist *sg,
- unsigned int nbytes)
-{
- if (WARN_ON_ONCE(in_irq()))
- return -EDEADLK;
- return crypto_xcbc_digest_update2(pdesc, sg, nbytes);
-}
+ crypto_xor(prev, odds, bs);
+ crypto_cipher_encrypt_one(tfm, prev, prev);
-static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out)
-{
- struct crypto_hash *parent = pdesc->tfm;
- struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
- struct crypto_cipher *tfm = ctx->child;
- int bs = crypto_hash_blocksize(parent);
- int err = 0;
-
- if (ctx->len == bs) {
- u8 key2[bs];
+ /* clearing the length */
+ ctx->len = 0;
- if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
- return err;
+ /* encrypting the rest of data */
+ while (len > bs) {
+ crypto_xor(prev, p, bs);
+ crypto_cipher_encrypt_one(tfm, prev, prev);
+ p += bs;
+ len -= bs;
+ }
- crypto_cipher_encrypt_one(tfm, key2,
- (u8 *)(ctx->consts + bs));
+ /* keeping the surplus of blocksize */
+ if (len) {
+ memcpy(odds, p, len);
+ ctx->len = len;
+ }
- ctx->xor(ctx->prev, ctx->odds, bs);
- ctx->xor(ctx->prev, key2, bs);
- _crypto_xcbc_digest_setkey(parent, ctx);
+ return 0;
+}
- crypto_cipher_encrypt_one(tfm, out, ctx->prev);
- } else {
- u8 key3[bs];
+static int crypto_xcbc_digest_final(struct shash_desc *pdesc, u8 *out)
+{
+ struct crypto_shash *parent = pdesc->tfm;
+ unsigned long alignmask = crypto_shash_alignmask(parent);
+ struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent);
+ struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
+ struct crypto_cipher *tfm = tctx->child;
+ int bs = crypto_shash_blocksize(parent);
+ u8 *consts = PTR_ALIGN(&tctx->ctx[0], alignmask + 1);
+ u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
+ u8 *prev = odds + bs;
+ unsigned int offset = 0;
+
+ if (ctx->len != bs) {
unsigned int rlen;
- u8 *p = ctx->odds + ctx->len;
+ u8 *p = odds + ctx->len;
+
*p = 0x80;
p++;
@@ -231,32 +163,15 @@ static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out)
if (rlen)
memset(p, 0, rlen);
- if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
- return err;
-
- crypto_cipher_encrypt_one(tfm, key3,
- (u8 *)(ctx->consts + bs * 2));
-
- ctx->xor(ctx->prev, ctx->odds, bs);
- ctx->xor(ctx->prev, key3, bs);
-
- _crypto_xcbc_digest_setkey(parent, ctx);
-
- crypto_cipher_encrypt_one(tfm, out, ctx->prev);
+ offset += bs;
}
- return 0;
-}
+ crypto_xor(prev, odds, bs);
+ crypto_xor(prev, consts + offset, bs);
-static int crypto_xcbc_digest(struct hash_desc *pdesc,
- struct scatterlist *sg, unsigned int nbytes, u8 *out)
-{
- if (WARN_ON_ONCE(in_irq()))
- return -EDEADLK;
+ crypto_cipher_encrypt_one(tfm, out, prev);
- crypto_xcbc_digest_init(pdesc);
- crypto_xcbc_digest_update2(pdesc, sg, nbytes);
- return crypto_xcbc_digest_final(pdesc, out);
+ return 0;
}
static int xcbc_init_tfm(struct crypto_tfm *tfm)
@@ -264,95 +179,95 @@ static int xcbc_init_tfm(struct crypto_tfm *tfm)
struct crypto_cipher *cipher;
struct crypto_instance *inst = (void *)tfm->__crt_alg;
struct crypto_spawn *spawn = crypto_instance_ctx(inst);
- struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));
- int bs = crypto_hash_blocksize(__crypto_hash_cast(tfm));
+ struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
cipher = crypto_spawn_cipher(spawn);
if (IS_ERR(cipher))
return PTR_ERR(cipher);
- switch(bs) {
- case 16:
- ctx->xor = xor_128;
- break;
- default:
- return -EINVAL;
- }
-
ctx->child = cipher;
- ctx->odds = (u8*)(ctx+1);
- ctx->prev = ctx->odds + bs;
- ctx->key = ctx->prev + bs;
return 0;
};
static void xcbc_exit_tfm(struct crypto_tfm *tfm)
{
- struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));
+ struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
crypto_free_cipher(ctx->child);
}
-static struct crypto_instance *xcbc_alloc(struct rtattr **tb)
+static int xcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
- struct crypto_instance *inst;
+ struct shash_instance *inst;
struct crypto_alg *alg;
+ unsigned long alignmask;
int err;
- err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_HASH);
+ err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
if (err)
- return ERR_PTR(err);
+ return err;
alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
CRYPTO_ALG_TYPE_MASK);
if (IS_ERR(alg))
- return ERR_CAST(alg);
+ return PTR_ERR(alg);
switch(alg->cra_blocksize) {
case 16:
break;
default:
- inst = ERR_PTR(-EINVAL);
goto out_put_alg;
}
- inst = crypto_alloc_instance("xcbc", alg);
+ inst = shash_alloc_instance("xcbc", alg);
+ err = PTR_ERR(inst);
if (IS_ERR(inst))
goto out_put_alg;
- inst->alg.cra_flags = CRYPTO_ALG_TYPE_HASH;
- inst->alg.cra_priority = alg->cra_priority;
- inst->alg.cra_blocksize = alg->cra_blocksize;
- inst->alg.cra_alignmask = alg->cra_alignmask;
- inst->alg.cra_type = &crypto_hash_type;
-
- inst->alg.cra_hash.digestsize = alg->cra_blocksize;
- inst->alg.cra_ctxsize = sizeof(struct crypto_xcbc_ctx) +
- ALIGN(inst->alg.cra_blocksize * 3, sizeof(void *));
- inst->alg.cra_init = xcbc_init_tfm;
- inst->alg.cra_exit = xcbc_exit_tfm;
-
- inst->alg.cra_hash.init = crypto_xcbc_digest_init;
- inst->alg.cra_hash.update = crypto_xcbc_digest_update;
- inst->alg.cra_hash.final = crypto_xcbc_digest_final;
- inst->alg.cra_hash.digest = crypto_xcbc_digest;
- inst->alg.cra_hash.setkey = crypto_xcbc_digest_setkey;
+ err = crypto_init_spawn(shash_instance_ctx(inst), alg,
+ shash_crypto_instance(inst),
+ CRYPTO_ALG_TYPE_MASK);
+ if (err)
+ goto out_free_inst;
+
+ alignmask = alg->cra_alignmask | 3;
+ inst->alg.base.cra_alignmask = alignmask;
+ inst->alg.base.cra_priority = alg->cra_priority;
+ inst->alg.base.cra_blocksize = alg->cra_blocksize;
+
+ inst->alg.digestsize = alg->cra_blocksize;
+ inst->alg.descsize = ALIGN(sizeof(struct xcbc_desc_ctx),
+ crypto_tfm_ctx_alignment()) +
+ (alignmask &
+ ~(crypto_tfm_ctx_alignment() - 1)) +
+ alg->cra_blocksize * 2;
+
+ inst->alg.base.cra_ctxsize = ALIGN(sizeof(struct xcbc_tfm_ctx),
+ alignmask + 1) +
+ alg->cra_blocksize * 2;
+ inst->alg.base.cra_init = xcbc_init_tfm;
+ inst->alg.base.cra_exit = xcbc_exit_tfm;
+
+ inst->alg.init = crypto_xcbc_digest_init;
+ inst->alg.update = crypto_xcbc_digest_update;
+ inst->alg.final = crypto_xcbc_digest_final;
+ inst->alg.setkey = crypto_xcbc_digest_setkey;
+
+ err = shash_register_instance(tmpl, inst);
+ if (err) {
+out_free_inst:
+ shash_free_instance(shash_crypto_instance(inst));
+ }
out_put_alg:
crypto_mod_put(alg);
- return inst;
-}
-
-static void xcbc_free(struct crypto_instance *inst)
-{
- crypto_drop_spawn(crypto_instance_ctx(inst));
- kfree(inst);
+ return err;
}
static struct crypto_template crypto_xcbc_tmpl = {
.name = "xcbc",
- .alloc = xcbc_alloc,
- .free = xcbc_free,
+ .create = xcbc_create,
+ .free = shash_free_instance,
.module = THIS_MODULE,
};
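With the template converted to the shash interface above, callers now drive XCBC through the generic crypto_shash API instead of the old crypto_hash one. The following is a minimal sketch of that usage, not part of the patch: xcbc_mac_example() is a hypothetical helper, it assumes an "xcbc(aes)" instance is available, and error handling is abbreviated.

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

static int xcbc_mac_example(const u8 *key, unsigned int keylen,
			    const u8 *data, unsigned int len, u8 *mac)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	tfm = crypto_alloc_shash("xcbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	/* descsize now covers the aligned odds/prev scratch blocks */
	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		err = -ENOMEM;
		goto out_free_tfm;
	}
	desc->tfm = tfm;

	/* init + update + final in one call; mac must hold the digest size
	 * (the cipher blocksize, 16 bytes for xcbc(aes)) */
	err = crypto_shash_digest(desc, data, len, mac);

	kfree(desc);
out_free_tfm:
	crypto_free_shash(tfm);
	return err;
}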
diff --git a/crypto/xor.c b/crypto/xor.c
index b2e6db075e4..35d6b3adf23 100644
--- a/crypto/xor.c
+++ b/crypto/xor.c
@@ -18,8 +18,10 @@
#define BH_TRACE 0
#include <linux/module.h>
-#include <linux/raid/md.h>
+#include <linux/gfp.h>
#include <linux/raid/xor.h>
+#include <linux/jiffies.h>
+#include <linux/preempt.h>
#include <asm/xor.h>
/* The xor routines to use. */
@@ -54,20 +56,22 @@ xor_blocks(unsigned int src_count, unsigned int bytes, void *dest, void **srcs)
EXPORT_SYMBOL(xor_blocks);
/* Set of all registered templates. */
-static struct xor_block_template *template_list;
+static struct xor_block_template *__initdata template_list;
#define BENCH_SIZE (PAGE_SIZE)
-static void
+static void __init
do_xor_speed(struct xor_block_template *tmpl, void *b1, void *b2)
{
int speed;
- unsigned long now;
+ unsigned long now, j;
int i, count, max;
tmpl->next = template_list;
template_list = tmpl;
+ preempt_disable();
+
/*
* Count the number of XORs done during a whole jiffy, and use
* this to calculate the speed of checksumming. We use a 2-page
@@ -75,9 +79,11 @@ do_xor_speed(struct xor_block_template *tmpl, void *b1, void *b2)
*/
max = 0;
for (i = 0; i < 5; i++) {
- now = jiffies;
+ j = jiffies;
count = 0;
- while (jiffies == now) {
+ while ((now = jiffies) == j)
+ cpu_relax();
+ while (time_before(jiffies, now + 1)) {
mb(); /* prevent loop optimization */
tmpl->do_2(BENCH_SIZE, b1, b2);
mb();
@@ -88,6 +94,8 @@ do_xor_speed(struct xor_block_template *tmpl, void *b1, void *b2)
max = count;
}
+ preempt_enable();
+
speed = max * (HZ * BENCH_SIZE / 1024);
tmpl->speed = speed;
@@ -101,7 +109,12 @@ calibrate_xor_blocks(void)
void *b1, *b2;
struct xor_block_template *f, *fastest;
- b1 = (void *) __get_free_pages(GFP_KERNEL, 2);
+ /*
+ * Note: Since the memory is not actually used for _anything_ but to
+ * test the XOR speed, we don't really want kmemcheck to warn about
+ * reading uninitialized bytes here.
+ */
+ b1 = (void *) __get_free_pages(GFP_KERNEL | __GFP_NOTRACK, 2);
if (!b1) {
printk(KERN_WARNING "xor: Yikes! No memory available.\n");
return -ENOMEM;
@@ -123,9 +136,9 @@ calibrate_xor_blocks(void)
if (fastest) {
printk(KERN_INFO "xor: automatically using best "
- "checksumming function: %s\n",
- fastest->name);
+ "checksumming function:\n");
xor_speed(fastest);
+ goto out;
} else {
printk(KERN_INFO "xor: measuring software checksum speed\n");
XOR_TRY_TEMPLATES;
@@ -140,6 +153,7 @@ calibrate_xor_blocks(void)
#undef xor_speed
+ out:
free_pages((unsigned long)b1, 2);
active_template = fastest;
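For reference, the number derived from the timed loop above is just the best per-jiffy iteration count scaled by the formula in do_xor_speed(). The snippet below is illustrative only and re-runs that arithmetic with made-up sample values.

/* Illustrative only: recompute do_xor_speed()'s result for sample
 * values (assumed HZ of 1000, PAGE_SIZE of 4096, and a best count of
 * 500 two-buffer XOR passes completed within one jiffy).
 */
static inline int xor_speed_sample(void)
{
	const int hz = 1000;		/* assumed HZ */
	const int bench_size = 4096;	/* assumed PAGE_SIZE == BENCH_SIZE */
	const int max = 500;		/* best per-jiffy iteration count */

	/* 500 * (1000 * 4096 / 1024) = 2000000 KB/s, i.e. roughly 2 GB/s */
	return max * (hz * bench_size / 1024);
}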
diff --git a/crypto/xts.c b/crypto/xts.c
index d87b0f3102c..ca1608f44cb 100644
--- a/crypto/xts.c
+++ b/crypto/xts.c
@@ -21,6 +21,7 @@
#include <linux/scatterlist.h>
#include <linux/slab.h>
+#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
@@ -45,7 +46,7 @@ static int setkey(struct crypto_tfm *parent, const u8 *key,
return -EINVAL;
}
- /* we need two cipher instances: one to compute the inital 'tweak'
+ /* we need two cipher instances: one to compute the initial 'tweak'
* by encrypting the IV (usually the 'plain' iv) and the other
* one to encrypt and decrypt the data */
@@ -96,7 +97,7 @@ static int crypt(struct blkcipher_desc *d,
{
int err;
unsigned int avail;
- const int bs = crypto_cipher_blocksize(ctx->child);
+ const int bs = XTS_BLOCK_SIZE;
struct sinfo s = {
.tfm = crypto_cipher_tfm(ctx->child),
.fn = fn
@@ -165,6 +166,78 @@ static int decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
crypto_cipher_alg(ctx->child)->cia_decrypt);
}
+int xts_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst,
+ struct scatterlist *ssrc, unsigned int nbytes,
+ struct xts_crypt_req *req)
+{
+ const unsigned int bsize = XTS_BLOCK_SIZE;
+ const unsigned int max_blks = req->tbuflen / bsize;
+ struct blkcipher_walk walk;
+ unsigned int nblocks;
+ be128 *src, *dst, *t;
+ be128 *t_buf = req->tbuf;
+ int err, i;
+
+ BUG_ON(max_blks < 1);
+
+ blkcipher_walk_init(&walk, sdst, ssrc, nbytes);
+
+ err = blkcipher_walk_virt(desc, &walk);
+ nbytes = walk.nbytes;
+ if (!nbytes)
+ return err;
+
+ nblocks = min(nbytes / bsize, max_blks);
+ src = (be128 *)walk.src.virt.addr;
+ dst = (be128 *)walk.dst.virt.addr;
+
+ /* calculate first value of T */
+ req->tweak_fn(req->tweak_ctx, (u8 *)&t_buf[0], walk.iv);
+
+ i = 0;
+ goto first;
+
+ for (;;) {
+ do {
+ for (i = 0; i < nblocks; i++) {
+ gf128mul_x_ble(&t_buf[i], t);
+first:
+ t = &t_buf[i];
+
+ /* PP <- T xor P */
+ be128_xor(dst + i, t, src + i);
+ }
+
+ /* CC <- E(Key2,PP) */
+ req->crypt_fn(req->crypt_ctx, (u8 *)dst,
+ nblocks * bsize);
+
+ /* C <- T xor CC */
+ for (i = 0; i < nblocks; i++)
+ be128_xor(dst + i, dst + i, &t_buf[i]);
+
+ src += nblocks;
+ dst += nblocks;
+ nbytes -= nblocks * bsize;
+ nblocks = min(nbytes / bsize, max_blks);
+ } while (nblocks > 0);
+
+ *(be128 *)walk.iv = *t;
+
+ err = blkcipher_walk_done(desc, &walk, nbytes);
+ nbytes = walk.nbytes;
+ if (!nbytes)
+ break;
+
+ nblocks = min(nbytes / bsize, max_blks);
+ src = (be128 *)walk.src.virt.addr;
+ dst = (be128 *)walk.dst.virt.addr;
+ }
+
+ return err;
+}
+EXPORT_SYMBOL_GPL(xts_crypt);
+
static int init_tfm(struct crypto_tfm *tfm)
{
struct crypto_cipher *cipher;
@@ -177,7 +250,7 @@ static int init_tfm(struct crypto_tfm *tfm)
if (IS_ERR(cipher))
return PTR_ERR(cipher);
- if (crypto_cipher_blocksize(cipher) != 16) {
+ if (crypto_cipher_blocksize(cipher) != XTS_BLOCK_SIZE) {
*flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
crypto_free_cipher(cipher);
return -EINVAL;
@@ -192,7 +265,7 @@ static int init_tfm(struct crypto_tfm *tfm)
}
/* this check isn't really needed, leave it here just in case */
- if (crypto_cipher_blocksize(cipher) != 16) {
+ if (crypto_cipher_blocksize(cipher) != XTS_BLOCK_SIZE) {
crypto_free_cipher(cipher);
crypto_free_cipher(ctx->child);
*flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
@@ -224,7 +297,7 @@ static struct crypto_instance *alloc(struct rtattr **tb)
alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
CRYPTO_ALG_TYPE_MASK);
if (IS_ERR(alg))
- return ERR_PTR(PTR_ERR(alg));
+ return ERR_CAST(alg);
inst = crypto_alloc_instance("xts", alg);
if (IS_ERR(inst))
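The xts_crypt() helper exported above lets other implementations reuse this walk-and-tweak loop by supplying their own tweak and bulk-encryption callbacks through struct xts_crypt_req. The sketch below shows the shape of such a caller and is not part of the patch: struct my_xts_ctx, my_tweak_encrypt(), my_bulk_encrypt() and MY_TBUF_BLOCKS are hypothetical placeholders, and the callback prototypes are assumed to match the request structure in <crypto/xts.h>.

#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

#define MY_TBUF_BLOCKS 8	/* scratch tweak blocks handed to xts_crypt() */

/* Hypothetical per-tfm state: one cipher for the tweak, one for the data. */
struct my_xts_ctx {
	struct crypto_cipher *tweak_cipher;
	struct crypto_cipher *data_cipher;
};

/* Computes T0 by encrypting the IV with the tweak cipher. */
static void my_tweak_encrypt(void *ctx, u8 *dst, const u8 *src)
{
	crypto_cipher_encrypt_one(ctx, dst, src);
}

/* ECB-style bulk pass over the already-tweaked blocks. */
static void my_bulk_encrypt(void *ctx, u8 *blks, unsigned int nbytes)
{
	struct crypto_cipher *tfm = ctx;
	unsigned int i;

	for (i = 0; i < nbytes; i += XTS_BLOCK_SIZE)
		crypto_cipher_encrypt_one(tfm, blks + i, blks + i);
}

static int my_xts_encrypt(struct blkcipher_desc *desc,
			  struct scatterlist *dst, struct scatterlist *src,
			  unsigned int nbytes)
{
	struct my_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 tbuf[MY_TBUF_BLOCKS];
	struct xts_crypt_req req = {
		.tbuf		= tbuf,
		.tbuflen	= sizeof(tbuf),
		.tweak_ctx	= ctx->tweak_cipher,
		.tweak_fn	= my_tweak_encrypt,
		.crypt_ctx	= ctx->data_cipher,
		.crypt_fn	= my_bulk_encrypt,
	};

	return xts_crypt(desc, dst, src, nbytes, &req);
}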
diff --git a/crypto/zlib.c b/crypto/zlib.c
new file mode 100644
index 00000000000..06b62e5cdcc
--- /dev/null
+++ b/crypto/zlib.c
@@ -0,0 +1,380 @@
+/*
+ * Cryptographic API.
+ *
+ * Zlib algorithm
+ *
+ * Copyright 2008 Sony Corporation
+ *
+ * Based on deflate.c, which is
+ * Copyright (c) 2003 James Morris <jmorris@intercode.com.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ * FIXME: deflate transforms will require up to a total of about 436k of kernel
+ * memory on i386 (390k for compression, the rest for decompression), as the
+ * current zlib kernel code uses a worst case pre-allocation system by default.
+ * This needs to be fixed so that the amount of memory required is properly
+ * related to the winbits and memlevel parameters.
+ */
+
+#define pr_fmt(fmt) "%s: " fmt, __func__
+
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/zlib.h>
+#include <linux/vmalloc.h>
+#include <linux/interrupt.h>
+#include <linux/mm.h>
+#include <linux/net.h>
+
+#include <crypto/internal/compress.h>
+
+#include <net/netlink.h>
+
+
+struct zlib_ctx {
+ struct z_stream_s comp_stream;
+ struct z_stream_s decomp_stream;
+ int decomp_windowBits;
+};
+
+
+static void zlib_comp_exit(struct zlib_ctx *ctx)
+{
+ struct z_stream_s *stream = &ctx->comp_stream;
+
+ if (stream->workspace) {
+ zlib_deflateEnd(stream);
+ vfree(stream->workspace);
+ stream->workspace = NULL;
+ }
+}
+
+static void zlib_decomp_exit(struct zlib_ctx *ctx)
+{
+ struct z_stream_s *stream = &ctx->decomp_stream;
+
+ if (stream->workspace) {
+ zlib_inflateEnd(stream);
+ vfree(stream->workspace);
+ stream->workspace = NULL;
+ }
+}
+
+static int zlib_init(struct crypto_tfm *tfm)
+{
+ return 0;
+}
+
+static void zlib_exit(struct crypto_tfm *tfm)
+{
+ struct zlib_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ zlib_comp_exit(ctx);
+ zlib_decomp_exit(ctx);
+}
+
+
+static int zlib_compress_setup(struct crypto_pcomp *tfm, void *params,
+ unsigned int len)
+{
+ struct zlib_ctx *ctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm));
+ struct z_stream_s *stream = &ctx->comp_stream;
+ struct nlattr *tb[ZLIB_COMP_MAX + 1];
+ int window_bits, mem_level;
+ size_t workspacesize;
+ int ret;
+
+ ret = nla_parse(tb, ZLIB_COMP_MAX, params, len, NULL);
+ if (ret)
+ return ret;
+
+ zlib_comp_exit(ctx);
+
+ window_bits = tb[ZLIB_COMP_WINDOWBITS]
+ ? nla_get_u32(tb[ZLIB_COMP_WINDOWBITS])
+ : MAX_WBITS;
+ mem_level = tb[ZLIB_COMP_MEMLEVEL]
+ ? nla_get_u32(tb[ZLIB_COMP_MEMLEVEL])
+ : DEF_MEM_LEVEL;
+
+ workspacesize = zlib_deflate_workspacesize(window_bits, mem_level);
+ stream->workspace = vzalloc(workspacesize);
+ if (!stream->workspace)
+ return -ENOMEM;
+
+ ret = zlib_deflateInit2(stream,
+ tb[ZLIB_COMP_LEVEL]
+ ? nla_get_u32(tb[ZLIB_COMP_LEVEL])
+ : Z_DEFAULT_COMPRESSION,
+ tb[ZLIB_COMP_METHOD]
+ ? nla_get_u32(tb[ZLIB_COMP_METHOD])
+ : Z_DEFLATED,
+ window_bits,
+ mem_level,
+ tb[ZLIB_COMP_STRATEGY]
+ ? nla_get_u32(tb[ZLIB_COMP_STRATEGY])
+ : Z_DEFAULT_STRATEGY);
+ if (ret != Z_OK) {
+ vfree(stream->workspace);
+ stream->workspace = NULL;
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
+static int zlib_compress_init(struct crypto_pcomp *tfm)
+{
+ int ret;
+ struct zlib_ctx *dctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm));
+ struct z_stream_s *stream = &dctx->comp_stream;
+
+ ret = zlib_deflateReset(stream);
+ if (ret != Z_OK)
+ return -EINVAL;
+
+ return 0;
+}
+
+static int zlib_compress_update(struct crypto_pcomp *tfm,
+ struct comp_request *req)
+{
+ int ret;
+ struct zlib_ctx *dctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm));
+ struct z_stream_s *stream = &dctx->comp_stream;
+
+ pr_debug("avail_in %u, avail_out %u\n", req->avail_in, req->avail_out);
+ stream->next_in = req->next_in;
+ stream->avail_in = req->avail_in;
+ stream->next_out = req->next_out;
+ stream->avail_out = req->avail_out;
+
+ ret = zlib_deflate(stream, Z_NO_FLUSH);
+ switch (ret) {
+ case Z_OK:
+ break;
+
+ case Z_BUF_ERROR:
+ pr_debug("zlib_deflate could not make progress\n");
+ return -EAGAIN;
+
+ default:
+ pr_debug("zlib_deflate failed %d\n", ret);
+ return -EINVAL;
+ }
+
+ ret = req->avail_out - stream->avail_out;
+ pr_debug("avail_in %u, avail_out %u (consumed %u, produced %u)\n",
+ stream->avail_in, stream->avail_out,
+ req->avail_in - stream->avail_in, ret);
+ req->next_in = stream->next_in;
+ req->avail_in = stream->avail_in;
+ req->next_out = stream->next_out;
+ req->avail_out = stream->avail_out;
+ return ret;
+}
+
+static int zlib_compress_final(struct crypto_pcomp *tfm,
+ struct comp_request *req)
+{
+ int ret;
+ struct zlib_ctx *dctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm));
+ struct z_stream_s *stream = &dctx->comp_stream;
+
+ pr_debug("avail_in %u, avail_out %u\n", req->avail_in, req->avail_out);
+ stream->next_in = req->next_in;
+ stream->avail_in = req->avail_in;
+ stream->next_out = req->next_out;
+ stream->avail_out = req->avail_out;
+
+ ret = zlib_deflate(stream, Z_FINISH);
+ if (ret != Z_STREAM_END) {
+ pr_debug("zlib_deflate failed %d\n", ret);
+ return -EINVAL;
+ }
+
+ ret = req->avail_out - stream->avail_out;
+ pr_debug("avail_in %u, avail_out %u (consumed %u, produced %u)\n",
+ stream->avail_in, stream->avail_out,
+ req->avail_in - stream->avail_in, ret);
+ req->next_in = stream->next_in;
+ req->avail_in = stream->avail_in;
+ req->next_out = stream->next_out;
+ req->avail_out = stream->avail_out;
+ return ret;
+}
+
+
+static int zlib_decompress_setup(struct crypto_pcomp *tfm, void *params,
+ unsigned int len)
+{
+ struct zlib_ctx *ctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm));
+ struct z_stream_s *stream = &ctx->decomp_stream;
+ struct nlattr *tb[ZLIB_DECOMP_MAX + 1];
+ int ret = 0;
+
+ ret = nla_parse(tb, ZLIB_DECOMP_MAX, params, len, NULL);
+ if (ret)
+ return ret;
+
+ zlib_decomp_exit(ctx);
+
+ ctx->decomp_windowBits = tb[ZLIB_DECOMP_WINDOWBITS]
+ ? nla_get_u32(tb[ZLIB_DECOMP_WINDOWBITS])
+ : DEF_WBITS;
+
+ stream->workspace = vzalloc(zlib_inflate_workspacesize());
+ if (!stream->workspace)
+ return -ENOMEM;
+
+ ret = zlib_inflateInit2(stream, ctx->decomp_windowBits);
+ if (ret != Z_OK) {
+ vfree(stream->workspace);
+ stream->workspace = NULL;
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
+static int zlib_decompress_init(struct crypto_pcomp *tfm)
+{
+ int ret;
+ struct zlib_ctx *dctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm));
+ struct z_stream_s *stream = &dctx->decomp_stream;
+
+ ret = zlib_inflateReset(stream);
+ if (ret != Z_OK)
+ return -EINVAL;
+
+ return 0;
+}
+
+static int zlib_decompress_update(struct crypto_pcomp *tfm,
+ struct comp_request *req)
+{
+ int ret;
+ struct zlib_ctx *dctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm));
+ struct z_stream_s *stream = &dctx->decomp_stream;
+
+ pr_debug("avail_in %u, avail_out %u\n", req->avail_in, req->avail_out);
+ stream->next_in = req->next_in;
+ stream->avail_in = req->avail_in;
+ stream->next_out = req->next_out;
+ stream->avail_out = req->avail_out;
+
+ ret = zlib_inflate(stream, Z_SYNC_FLUSH);
+ switch (ret) {
+ case Z_OK:
+ case Z_STREAM_END:
+ break;
+
+ case Z_BUF_ERROR:
+ pr_debug("zlib_inflate could not make progress\n");
+ return -EAGAIN;
+
+ default:
+ pr_debug("zlib_inflate failed %d\n", ret);
+ return -EINVAL;
+ }
+
+ ret = req->avail_out - stream->avail_out;
+ pr_debug("avail_in %u, avail_out %u (consumed %u, produced %u)\n",
+ stream->avail_in, stream->avail_out,
+ req->avail_in - stream->avail_in, ret);
+ req->next_in = stream->next_in;
+ req->avail_in = stream->avail_in;
+ req->next_out = stream->next_out;
+ req->avail_out = stream->avail_out;
+ return ret;
+}
+
+static int zlib_decompress_final(struct crypto_pcomp *tfm,
+ struct comp_request *req)
+{
+ int ret;
+ struct zlib_ctx *dctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm));
+ struct z_stream_s *stream = &dctx->decomp_stream;
+
+ pr_debug("avail_in %u, avail_out %u\n", req->avail_in, req->avail_out);
+ stream->next_in = req->next_in;
+ stream->avail_in = req->avail_in;
+ stream->next_out = req->next_out;
+ stream->avail_out = req->avail_out;
+
+ if (dctx->decomp_windowBits < 0) {
+ ret = zlib_inflate(stream, Z_SYNC_FLUSH);
+ /*
+ * Work around a bug in zlib, which sometimes wants to taste an
+ * extra byte when being used in the (undocumented) raw deflate
+ * mode. (From USAGI).
+ */
+ if (ret == Z_OK && !stream->avail_in && stream->avail_out) {
+ const void *saved_next_in = stream->next_in;
+ u8 zerostuff = 0;
+
+ stream->next_in = &zerostuff;
+ stream->avail_in = 1;
+ ret = zlib_inflate(stream, Z_FINISH);
+ stream->next_in = saved_next_in;
+ stream->avail_in = 0;
+ }
+ } else
+ ret = zlib_inflate(stream, Z_FINISH);
+ if (ret != Z_STREAM_END) {
+ pr_debug("zlib_inflate failed %d\n", ret);
+ return -EINVAL;
+ }
+
+ ret = req->avail_out - stream->avail_out;
+ pr_debug("avail_in %u, avail_out %u (consumed %u, produced %u)\n",
+ stream->avail_in, stream->avail_out,
+ req->avail_in - stream->avail_in, ret);
+ req->next_in = stream->next_in;
+ req->avail_in = stream->avail_in;
+ req->next_out = stream->next_out;
+ req->avail_out = stream->avail_out;
+ return ret;
+}
+
+
+static struct pcomp_alg zlib_alg = {
+ .compress_setup = zlib_compress_setup,
+ .compress_init = zlib_compress_init,
+ .compress_update = zlib_compress_update,
+ .compress_final = zlib_compress_final,
+ .decompress_setup = zlib_decompress_setup,
+ .decompress_init = zlib_decompress_init,
+ .decompress_update = zlib_decompress_update,
+ .decompress_final = zlib_decompress_final,
+
+ .base = {
+ .cra_name = "zlib",
+ .cra_flags = CRYPTO_ALG_TYPE_PCOMPRESS,
+ .cra_ctxsize = sizeof(struct zlib_ctx),
+ .cra_module = THIS_MODULE,
+ .cra_init = zlib_init,
+ .cra_exit = zlib_exit,
+ }
+};
+
+static int __init zlib_mod_init(void)
+{
+ return crypto_register_pcomp(&zlib_alg);
+}
+
+static void __exit zlib_mod_fini(void)
+{
+ crypto_unregister_pcomp(&zlib_alg);
+}
+
+module_init(zlib_mod_init);
+module_exit(zlib_mod_fini);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Zlib Compression Algorithm");
+MODULE_AUTHOR("Sony Corporation");
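A user drives this transform through the partial-compression (pcomp) API: allocate a "zlib" tfm, pass setup parameters as a netlink attribute blob, then run init/update/final over a comp_request. The sketch below is illustrative only and not part of the patch: zlib_compress_example() is a hypothetical helper, the function names follow the pcomp interface in <crypto/compress.h>, and it assumes an empty parameter blob is acceptable, which leaves every knob at the defaults chosen in zlib_compress_setup() above (Z_DEFAULT_COMPRESSION, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY).

#include <crypto/compress.h>
#include <linux/err.h>

static int zlib_compress_example(const u8 *in, unsigned int inlen,
				 u8 *out, unsigned int outlen)
{
	struct crypto_pcomp *tfm;
	struct comp_request req = {
		.next_in   = in,
		.avail_in  = inlen,
		.next_out  = out,
		.avail_out = outlen,
	};
	int produced = 0;
	int ret;

	tfm = crypto_alloc_pcomp("zlib", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* No attributes: zlib_compress_setup() falls back to its defaults. */
	ret = crypto_compress_setup(tfm, NULL, 0);
	if (ret)
		goto out;

	ret = crypto_compress_init(tfm);
	if (ret)
		goto out;

	/* update and final return the number of output bytes they produced */
	ret = crypto_compress_update(tfm, &req);
	if (ret < 0)
		goto out;
	produced += ret;

	ret = crypto_compress_final(tfm, &req);
	if (ret < 0)
		goto out;
	produced += ret;

	ret = produced;		/* total compressed length */
out:
	crypto_free_pcomp(tfm);
	return ret;
}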