From a5f8c473052bc693cdbe2f9ae4b424b993886ff5 Mon Sep 17 00:00:00 2001
From: Denis Vlasenko
Date: Mon, 16 Jan 2006 17:42:28 +1100
Subject: [CRYPTO] twofish: Use rol32/ror32 where appropriate

Convert open-coded rotations to rol32/ror32.

Signed-off-by: Herbert Xu
---
 crypto/twofish.c | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/crypto/twofish.c b/crypto/twofish.c
index a26d885486f..ddfd5a3fcc5 100644
--- a/crypto/twofish.c
+++ b/crypto/twofish.c
@@ -44,6 +44,7 @@
 #include <linux/types.h>
 #include <linux/errno.h>
 #include <linux/crypto.h>
+#include <linux/bitops.h>
 
 
 /* The large precomputed tables for the Twofish cipher (twofish.c)
@@ -542,9 +543,9 @@ static const u8 calc_sb_tbl[512] = {
 #define CALC_K(a, j, k, l, m, n) \
    x = CALC_K_2 (k, l, k, l, 0); \
    y = CALC_K_2 (m, n, m, n, 4); \
-   y = (y << 8) + (y >> 24); \
+   y = rol32(y, 8); \
    x += y; y += x; ctx->a[j] = x; \
-   ctx->a[(j) + 1] = (y << 9) + (y >> 23)
+   ctx->a[(j) + 1] = rol32(y, 9)
 
 #define CALC_K192_2(a, b, c, d, j) \
    CALC_K_2 (q0[a ^ key[(j) + 16]], \
@@ -555,9 +556,9 @@ static const u8 calc_sb_tbl[512] = {
 #define CALC_K192(a, j, k, l, m, n) \
    x = CALC_K192_2 (l, l, k, k, 0); \
    y = CALC_K192_2 (n, n, m, m, 4); \
-   y = (y << 8) + (y >> 24); \
+   y = rol32(y, 8); \
    x += y; y += x; ctx->a[j] = x; \
-   ctx->a[(j) + 1] = (y << 9) + (y >> 23)
+   ctx->a[(j) + 1] = rol32(y, 9)
 
 #define CALC_K256_2(a, b, j) \
    CALC_K192_2 (q1[b ^ key[(j) + 24]], \
@@ -568,9 +569,9 @@ static const u8 calc_sb_tbl[512] = {
 #define CALC_K256(a, j, k, l, m, n) \
    x = CALC_K256_2 (k, l, 0); \
    y = CALC_K256_2 (m, n, 4); \
-   y = (y << 8) + (y >> 24); \
+   y = rol32(y, 8); \
    x += y; y += x; ctx->a[j] = x; \
-   ctx->a[(j) + 1] = (y << 9) + (y >> 23)
+   ctx->a[(j) + 1] = rol32(y, 9)
 
 
 /* Macros to compute the g() function in the encryption and decryption
@@ -594,15 +595,15 @@ static const u8 calc_sb_tbl[512] = {
    x = G1 (a); y = G2 (b); \
    x += y; y += x + ctx->k[2 * (n) + 1]; \
    (c) ^= x + ctx->k[2 * (n)]; \
-   (c) = ((c) >> 1) + ((c) << 31); \
-   (d) = (((d) << 1)+((d) >> 31)) ^ y
+   (c) = ror32((c), 1); \
+   (d) = rol32((d), 1) ^ y
 
 #define DECROUND(n, a, b, c, d) \
    x = G1 (a); y = G2 (b); \
    x += y; y += x; \
    (d) ^= y + ctx->k[2 * (n) + 1]; \
-   (d) = ((d) >> 1) + ((d) << 31); \
-   (c) = (((c) << 1)+((c) >> 31)); \
+   (d) = ror32((d), 1); \
+   (c) = rol32((c), 1); \
    (c) ^= (x + ctx->k[2 * (n)])
 
 /* Encryption and decryption cycles; each one is simply two Feistel rounds
-- cgit v1.2.3-18-g5258


From f10b7897ee29649fa7f0ccdc8d859ccd6ce7dbfd Mon Sep 17 00:00:00 2001
From: Herbert Xu
Date: Wed, 25 Jan 2006 22:34:01 +1100
Subject: [CRYPTO] api: Align tfm context as wide as possible

Since tfm contexts can contain arbitrary types we should provide at least
natural alignment (__attribute__ ((__aligned__))) for them.  In particular,
this is needed on the Xscale, which is a 32-bit architecture with a u64 type
that requires 64-bit alignment.  This problem was reported by Ronen Shitrit.

The crypto_tfm structure's size was 44 bytes on 32-bit architectures and
80 bytes on 64-bit architectures.  So adding this requirement only means
that we have to add an extra 4 bytes on 32-bit architectures.

On i386 the natural alignment is 16 bytes, which also benefits the VIA
Padlock as it no longer has to manually align its context structure to
128 bits.
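As a rough stand-alone illustration of the point above (not part of this patch; the struct name and members below are invented for the demo), the effect of marking a trailing flexible array with __attribute__ ((__aligned__)) can be checked from user space: the member picks up the platform's largest natural alignment, and the structure is padded out to a multiple of it, which is where the extra bytes on 32-bit machines come from.

#include <stdio.h>

/* Invented stand-in for struct crypto_tfm; only the trailing context array matters here. */
struct tfm_like {
	unsigned int crt_flags;
	char crt_ctx[] __attribute__ ((__aligned__));
};

int main(void)
{
	struct tfm_like *tfm = NULL;	/* never dereferenced; __alignof__ does not evaluate its operand */

	printf("context alignment: %zu\n", (size_t)__alignof__(tfm->crt_ctx));
	printf("header size:       %zu\n", sizeof(struct tfm_like));
	return 0;
}

The printed alignment is ABI-dependent (16 on i386 and x86-64 per the message above); the key point is that any u64 stored in the context area ends up naturally aligned, and the header size is rounded up accordingly.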
Signed-off-by: Herbert Xu
---
 crypto/api.c                 |  2 +-
 drivers/crypto/padlock-aes.c |  6 +++++-
 include/linux/crypto.h       | 10 +++++++++-
 3 files changed, 15 insertions(+), 3 deletions(-)

diff --git a/crypto/api.c b/crypto/api.c
index e26156f7183..34e02caffc2 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -165,7 +165,7 @@ static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags)
 		break;
 	}
 
-	return len + alg->cra_alignmask;
+	return len + (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
 }
 
 struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
diff --git a/drivers/crypto/padlock-aes.c b/drivers/crypto/padlock-aes.c
index 0c08c58252b..5158a9db4bc 100644
--- a/drivers/crypto/padlock-aes.c
+++ b/drivers/crypto/padlock-aes.c
@@ -284,7 +284,11 @@ aes_hw_extkey_available(uint8_t key_len)
 
 static inline struct aes_ctx *aes_ctx(void *ctx)
 {
-	return (struct aes_ctx *)ALIGN((unsigned long)ctx, PADLOCK_ALIGNMENT);
+	unsigned long align = PADLOCK_ALIGNMENT;
+
+	if (align <= crypto_tfm_ctx_alignment())
+		align = 1;
+	return (struct aes_ctx *)ALIGN((unsigned long)ctx, align);
 }
 
 static int
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index d88bf8aa8b4..0ab1bc1152c 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -229,6 +229,8 @@ struct crypto_tfm {
 	} crt_u;
 
 	struct crypto_alg *__crt_alg;
+
+	char __crt_ctx[] __attribute__ ((__aligned__));
 };
 
 /*
@@ -301,7 +303,13 @@ static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
 
 static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
 {
-	return (void *)&tfm[1];
+	return tfm->__crt_ctx;
+}
+
+static inline unsigned int crypto_tfm_ctx_alignment(void)
+{
+	struct crypto_tfm *tfm;
+	return __alignof__(tfm->__crt_ctx);
 }
 
 /*
-- cgit v1.2.3-18-g5258


From bbeb563f7bb5e703e4d60c18bed81c987781343f Mon Sep 17 00:00:00 2001
From: Eric Sesterhenn
Date: Mon, 6 Mar 2006 21:42:07 +1100
Subject: [CRYPTO] all: Use kzalloc where possible

This patch converts crypto/ to kzalloc usage.  Compile tested with
allyesconfig.

Signed-off-by: Eric Sesterhenn
Signed-off-by: Herbert Xu
---
 crypto/api.c     | 4 +---
 crypto/deflate.c | 3 +--
 2 files changed, 2 insertions(+), 5 deletions(-)

diff --git a/crypto/api.c b/crypto/api.c
index 34e02caffc2..80bba637fba 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -179,12 +179,10 @@ struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
 		goto out;
 
 	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags);
-	tfm = kmalloc(tfm_size, GFP_KERNEL);
+	tfm = kzalloc(tfm_size, GFP_KERNEL);
 	if (tfm == NULL)
 		goto out_put;
 
-	memset(tfm, 0, tfm_size);
-
 	tfm->__crt_alg = alg;
 
 	if (crypto_init_flags(tfm, flags))
diff --git a/crypto/deflate.c b/crypto/deflate.c
index bc73342cd1e..f209368d62a 100644
--- a/crypto/deflate.c
+++ b/crypto/deflate.c
@@ -73,12 +73,11 @@ static int deflate_decomp_init(struct deflate_ctx *ctx)
 	int ret = 0;
 	struct z_stream_s *stream = &ctx->decomp_stream;
 
-	stream->workspace = kmalloc(zlib_inflate_workspacesize(), GFP_KERNEL);
+	stream->workspace = kzalloc(zlib_inflate_workspacesize(), GFP_KERNEL);
 	if (!stream->workspace ) {
 		ret = -ENOMEM;
 		goto out;
 	}
-	memset(stream->workspace, 0, zlib_inflate_workspacesize());
 	ret = zlib_inflateInit2(stream, -DEFLATE_DEF_WINBITS);
 	if (ret != Z_OK) {
 		ret = -EINVAL;
-- cgit v1.2.3-18-g5258


From 20ea340489ddee7b3a438ee58f32f2608cc145de Mon Sep 17 00:00:00 2001
From: Atsushi Nemoto
Date: Mon, 13 Mar 2006 21:30:29 +1100
Subject: [CRYPTO] all: Add missing cra_alignmask

The "des3_ede" and "serpent" ciphers lack a cra_alignmask.
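For context, cra_alignmask is the mask of low address bits that the crypto layer must keep clear for an algorithm's buffers, so a value of 3 requests 4-byte alignment and lets these ciphers read and write their data as u32 words. A hypothetical user-space sketch, not the kernel API (is_aligned and the buffer are invented for illustration):

#include <stdint.h>
#include <stdio.h>

/* A mask of 3 means "the two low address bits must be clear", i.e. 4-byte alignment. */
static int is_aligned(const void *p, unsigned long alignmask)
{
	return ((uintptr_t)p & alignmask) == 0;
}

int main(void)
{
	uint32_t words[4];
	uint8_t *p = (uint8_t *)words;

	printf("%d\n", is_aligned(p, 3));	/* 1: u32-aligned, safe to load as 32-bit words */
	printf("%d\n", is_aligned(p + 1, 3));	/* 0: misaligned; the API would have to copy or realign */
	return 0;
}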
Signed-off-by: Atsushi Nemoto
Signed-off-by: Herbert Xu
---
 crypto/des.c     | 1 +
 crypto/serpent.c | 1 +
 2 files changed, 2 insertions(+)

diff --git a/crypto/des.c b/crypto/des.c
index 7bb548653dc..2d74cab40c3 100644
--- a/crypto/des.c
+++ b/crypto/des.c
@@ -965,6 +965,7 @@ static struct crypto_alg des3_ede_alg = {
 	.cra_blocksize = DES3_EDE_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct des3_ede_ctx),
 	.cra_module = THIS_MODULE,
+	.cra_alignmask = 3,
 	.cra_list = LIST_HEAD_INIT(des3_ede_alg.cra_list),
 	.cra_u = { .cipher = {
 	.cia_min_keysize = DES3_EDE_KEY_SIZE,
diff --git a/crypto/serpent.c b/crypto/serpent.c
index 52ad1a49262..e366406ab49 100644
--- a/crypto/serpent.c
+++ b/crypto/serpent.c
@@ -481,6 +481,7 @@ static struct crypto_alg serpent_alg = {
 	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
 	.cra_blocksize = SERPENT_BLOCK_SIZE,
 	.cra_ctxsize = sizeof(struct serpent_ctx),
+	.cra_alignmask = 3,
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(serpent_alg.cra_list),
 	.cra_u = { .cipher = {
-- cgit v1.2.3-18-g5258


From 06b42aa94b65806b4f8c5fc893ef97a2f491fb32 Mon Sep 17 00:00:00 2001
From: Atsushi Nemoto
Date: Mon, 13 Mar 2006 21:39:23 +1100
Subject: [CRYPTO] tcrypt: Fix key alignment

Force 32-bit alignment on keys in tcrypt test vectors.  Also rearrange the
structures to prevent unnecessary padding.

Signed-off-by: Atsushi Nemoto
Signed-off-by: Herbert Xu
---
 crypto/tcrypt.h | 25 +++++++++++++------------
 1 file changed, 13 insertions(+), 12 deletions(-)

diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h
index 733d07ed75e..1f683ba794e 100644
--- a/crypto/tcrypt.h
+++ b/crypto/tcrypt.h
@@ -26,37 +26,38 @@
 #define MAX_IVLEN 32
 
 struct hash_testvec {
+	/* only used with keyed hash algorithms */
+	char key[128] __attribute__ ((__aligned__(4)));
 	char plaintext[128];
-	unsigned char psize;
 	char digest[MAX_DIGEST_SIZE];
-	unsigned char np;
 	unsigned char tap[MAX_TAP];
-	char key[128]; /* only used with keyed hash algorithms */
+	unsigned char psize;
+	unsigned char np;
 	unsigned char ksize;
 };
 
 struct hmac_testvec {
 	char key[128];
-	unsigned char ksize;
 	char plaintext[128];
-	unsigned char psize;
 	char digest[MAX_DIGEST_SIZE];
-	unsigned char np;
 	unsigned char tap[MAX_TAP];
+	unsigned char ksize;
+	unsigned char psize;
+	unsigned char np;
 };
 
 struct cipher_testvec {
+	char key[MAX_KEYLEN] __attribute__ ((__aligned__(4)));
+	char iv[MAX_IVLEN];
+	char input[48];
+	char result[48];
+	unsigned char tap[MAX_TAP];
+	int np;
 	unsigned char fail;
 	unsigned char wk; /* weak key flag */
-	char key[MAX_KEYLEN];
 	unsigned char klen;
-	char iv[MAX_IVLEN];
-	char input[48];
 	unsigned char ilen;
-	char result[48];
 	unsigned char rlen;
-	int np;
-	unsigned char tap[MAX_TAP];
 };
 
 struct cipher_speed {
-- cgit v1.2.3-18-g5258


From 55e9dce37ddf3ab358ba1d1e9eef4ee4bd8174a6 Mon Sep 17 00:00:00 2001
From: David McCullough
Date: Wed, 15 Mar 2006 21:08:51 +1100
Subject: [CRYPTO] aes: Fixed array boundary violation

The AES setkey routine writes 64 bytes to the E_KEY area even though there
are only 60 bytes there.  It is in fact safe since E_KEY is immediately
followed by D_KEY, which is initialised afterwards.  However, doing this
may trigger undefined behaviour and makes Coverity unhappy.

So by combining E_KEY and D_KEY into one array we sidestep this issue
altogether.

This problem was reported by Adrian Bunk.
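To make the concern concrete, here is a hypothetical stand-alone sketch (the structs are simplified stand-ins, not the real aes_ctx): the old layout only works because the stores that run past E[] happen to land in D[], while a combined 120-word buffer keeps every index in range.

#include <stdio.h>
#include <stdint.h>

struct old_ctx { uint32_t E[60]; uint32_t D[60]; };	/* 256-bit key expansion writes E[0..63] */
struct new_ctx { uint32_t buf[120]; };			/* E_KEY = &buf[0], D_KEY = &buf[60] */

int main(void)
{
	struct old_ctx o;
	struct new_ctx n;

	/* The old layout relies on E's one-past-the-end address coinciding with D[0];
	 * true in practice, but indexing E[60] is still undefined behaviour. */
	printf("old: &E[60] == &D[0]? %d\n", (void *)(o.E + 60) == (void *)o.D);

	/* The combined buffer simply has room for all 64 words written for a 256-bit key. */
	printf("new: buf holds %zu words\n", sizeof(n.buf) / sizeof(n.buf[0]));
	return 0;
}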
Signed-off-by: Herbert Xu
---
 arch/x86_64/crypto/aes.c | 7 +++----
 crypto/aes.c             | 7 +++----
 2 files changed, 6 insertions(+), 8 deletions(-)

diff --git a/arch/x86_64/crypto/aes.c b/arch/x86_64/crypto/aes.c
index fb1b961a2e2..6f77e7700d3 100644
--- a/arch/x86_64/crypto/aes.c
+++ b/arch/x86_64/crypto/aes.c
@@ -77,12 +77,11 @@ static inline u8 byte(const u32 x, const unsigned n)
 
 struct aes_ctx {
 	u32 key_length;
-	u32 E[60];
-	u32 D[60];
+	u32 buf[120];
 };
 
-#define E_KEY ctx->E
-#define D_KEY ctx->D
+#define E_KEY (&ctx->buf[0])
+#define D_KEY (&ctx->buf[60])
 
 static u8 pow_tab[256] __initdata;
 static u8 log_tab[256] __initdata;
diff --git a/crypto/aes.c b/crypto/aes.c
index 0a6a5c14368..a5017292e06 100644
--- a/crypto/aes.c
+++ b/crypto/aes.c
@@ -75,12 +75,11 @@ byte(const u32 x, const unsigned n)
 
 struct aes_ctx {
 	int key_length;
-	u32 E[60];
-	u32 D[60];
+	u32 buf[120];
 };
 
-#define E_KEY ctx->E
-#define D_KEY ctx->D
+#define E_KEY (&ctx->buf[0])
+#define D_KEY (&ctx->buf[60])
 
 static u8 pow_tab[256] __initdata;
 static u8 log_tab[256] __initdata;
-- cgit v1.2.3-18-g5258
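A closing note on the tcrypt reordering earlier in the series: once the key array carries a 4-byte alignment attribute, interleaving single-byte counters between the larger members costs padding holes, whereas grouping the small members at the end does not. A hypothetical illustration (the member names and sizes are invented, not the real test-vector structs):

#include <stdio.h>

struct interleaved {
	unsigned char fail;
	unsigned char wk;
	char key[32] __attribute__ ((__aligned__(4)));	/* 2 padding bytes inserted before this */
	unsigned char klen;
	int np;						/* 3 more padding bytes before this */
};

struct reordered {
	char key[32] __attribute__ ((__aligned__(4)));
	int np;
	unsigned char fail;
	unsigned char wk;
	unsigned char klen;				/* padding, if any, only at the tail */
};

int main(void)
{
	printf("interleaved: %zu bytes\n", sizeof(struct interleaved));
	printf("reordered:   %zu bytes\n", sizeof(struct reordered));
	return 0;
}

On a typical ABI the first struct comes out at 44 bytes and the second at 40, which is the kind of saving the reordering buys.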