53 files changed, 4515 insertions, 1512 deletions
diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c
index 4aba83b3159..2bc479ab3a6 100644
--- a/arch/s390/crypto/des_s390.c
+++ b/arch/s390/crypto/des_s390.c
@@ -250,8 +250,9 @@ static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key,
 	const u8 *temp_key = key;
 	u32 *flags = &tfm->crt_flags;
 
-	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE))) {
-		*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED;
+	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE)) &&
+	    (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
+		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
 		return -EINVAL;
 	}
 	for (i = 0; i < 2; i++, temp_key += DES_KEY_SIZE) {
@@ -411,9 +412,9 @@ static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key,
 
 	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
 	    memcmp(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
-					DES_KEY_SIZE))) {
-
-		*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED;
+					DES_KEY_SIZE)) &&
+	    (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
+		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
 		return -EINVAL;
 	}
 	for (i = 0; i < 3; i++, temp_key += DES_KEY_SIZE) {
diff --git a/arch/s390/crypto/sha1_s390.c b/arch/s390/crypto/sha1_s390.c
index e85ba348722..f6de7826c97 100644
--- a/arch/s390/crypto/sha1_s390.c
+++ b/arch/s390/crypto/sha1_s390.c
@@ -46,12 +46,38 @@ static int sha1_init(struct shash_desc *desc)
 	return 0;
 }
 
+static int sha1_export(struct shash_desc *desc, void *out)
+{
+	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+	struct sha1_state *octx = out;
+
+	octx->count = sctx->count;
+	memcpy(octx->state, sctx->state, sizeof(octx->state));
+	memcpy(octx->buffer, sctx->buf, sizeof(octx->buffer));
+	return 0;
+}
+
+static int sha1_import(struct shash_desc *desc, const void *in)
+{
+	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+	const struct sha1_state *ictx = in;
+
+	sctx->count = ictx->count;
+	memcpy(sctx->state, ictx->state, sizeof(ictx->state));
+	memcpy(sctx->buf, ictx->buffer, sizeof(ictx->buffer));
+	sctx->func = KIMD_SHA_1;
+	return 0;
+}
+
 static struct shash_alg alg = {
 	.digestsize	=	SHA1_DIGEST_SIZE,
 	.init		=	sha1_init,
 	.update		=	s390_sha_update,
 	.final		=	s390_sha_final,
+	.export		=	sha1_export,
+	.import		=	sha1_import,
 	.descsize	=	sizeof(struct s390_sha_ctx),
+	.statesize	=	sizeof(struct sha1_state),
 	.base		=	{
 		.cra_name	=	"sha1",
 		.cra_driver_name=	"sha1-s390",
diff --git a/arch/s390/crypto/sha256_s390.c b/arch/s390/crypto/sha256_s390.c
index f9fefc56963..61a7db37212 100644
--- a/arch/s390/crypto/sha256_s390.c
+++ b/arch/s390/crypto/sha256_s390.c
@@ -42,12 +42,38 @@ static int sha256_init(struct shash_desc *desc)
 	return 0;
 }
 
+static int sha256_export(struct shash_desc *desc, void *out)
+{
+	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+	struct sha256_state *octx = out;
+
+	octx->count = sctx->count;
+	memcpy(octx->state, sctx->state, sizeof(octx->state));
+	memcpy(octx->buf, sctx->buf, sizeof(octx->buf));
+	return 0;
+}
+
+static int sha256_import(struct shash_desc *desc, const void *in)
+{
+	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
+	const struct sha256_state *ictx = in;
+
+	sctx->count = ictx->count;
+	memcpy(sctx->state, ictx->state, sizeof(ictx->state));
+	memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
+	sctx->func = KIMD_SHA_256;
+	return 0;
+}
+
 static struct shash_alg alg = {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_init,
	.update		=	s390_sha_update,
	.final		=	s390_sha_final,
+	.export		=	sha256_export,
+	.import		=	sha256_import,
	.descsize	=	sizeof(struct s390_sha_ctx),
+	.statesize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name=	"sha256-s390",
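The export/import hooks added to the s390 SHA drivers above let a caller snapshot a partially hashed state into the generic struct shaN_state and resume it later, which the core exposes as crypto_shash_export()/crypto_shash_import(). A minimal sketch of such a caller follows; the function name, the two-part input, and the trimmed error handling are illustrative only, not part of this patch:

#include <crypto/hash.h>
#include <crypto/sha.h>
#include <linux/err.h>
#include <linux/slab.h>

/* Hash part1, snapshot the state, restore it, then finish with part2. */
static int sha256_export_import_demo(const u8 *part1, unsigned int len1,
				     const u8 *part2, unsigned int len2,
				     u8 *digest)
{
	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
	struct shash_desc *desc;
	struct sha256_state state;	/* matches .statesize above */
	int err;

	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(tfm);
		return -ENOMEM;
	}
	desc->tfm = tfm;

	err = crypto_shash_init(desc);
	if (!err)
		err = crypto_shash_update(desc, part1, len1);
	if (!err)
		err = crypto_shash_export(desc, &state);	/* snapshot */
	if (!err)
		err = crypto_shash_import(desc, &state);	/* resume */
	if (!err)
		err = crypto_shash_update(desc, part2, len2);
	if (!err)
		err = crypto_shash_final(desc, digest);

	kfree(desc);
	crypto_free_shash(tfm);
	return err;
}

Note that the exported state deliberately contains only the portable fields; the s390-private sctx->func selector is not exported, which is why each import hook re-seeds it with the right KIMD_SHA_* constant.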
"sha256-s390", diff --git a/arch/s390/crypto/sha512_s390.c b/arch/s390/crypto/sha512_s390.c index 83192bfc804..4bf73d0dc52 100644 --- a/arch/s390/crypto/sha512_s390.c +++ b/arch/s390/crypto/sha512_s390.c @@ -13,7 +13,10 @@ * */ #include <crypto/internal/hash.h> +#include <crypto/sha.h> +#include <linux/errno.h> #include <linux/init.h> +#include <linux/kernel.h> #include <linux/module.h> #include "sha.h" @@ -37,12 +40,42 @@ static int sha512_init(struct shash_desc *desc) return 0; } +static int sha512_export(struct shash_desc *desc, void *out) +{ + struct s390_sha_ctx *sctx = shash_desc_ctx(desc); + struct sha512_state *octx = out; + + octx->count[0] = sctx->count; + octx->count[1] = 0; + memcpy(octx->state, sctx->state, sizeof(octx->state)); + memcpy(octx->buf, sctx->buf, sizeof(octx->buf)); + return 0; +} + +static int sha512_import(struct shash_desc *desc, const void *in) +{ + struct s390_sha_ctx *sctx = shash_desc_ctx(desc); + const struct sha512_state *ictx = in; + + if (unlikely(ictx->count[1])) + return -ERANGE; + sctx->count = ictx->count[0]; + + memcpy(sctx->state, ictx->state, sizeof(ictx->state)); + memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf)); + sctx->func = KIMD_SHA_512; + return 0; +} + static struct shash_alg sha512_alg = { .digestsize = SHA512_DIGEST_SIZE, .init = sha512_init, .update = s390_sha_update, .final = s390_sha_final, + .export = sha512_export, + .import = sha512_import, .descsize = sizeof(struct s390_sha_ctx), + .statesize = sizeof(struct sha512_state), .base = { .cra_name = "sha512", .cra_driver_name= "sha512-s390", @@ -78,7 +111,10 @@ static struct shash_alg sha384_alg = { .init = sha384_init, .update = s390_sha_update, .final = s390_sha_final, + .export = sha512_export, + .import = sha512_import, .descsize = sizeof(struct s390_sha_ctx), + .statesize = sizeof(struct sha512_state), .base = { .cra_name = "sha384", .cra_driver_name= "sha384-s390", diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c index c580c5ec1ca..d3ec8d588d4 100644 --- a/arch/x86/crypto/aesni-intel_glue.c +++ b/arch/x86/crypto/aesni-intel_glue.c @@ -636,7 +636,7 @@ static int __init aesni_init(void) int err; if (!cpu_has_aes) { - printk(KERN_ERR "Intel AES-NI instructions are not detected.\n"); + printk(KERN_INFO "Intel AES-NI instructions are not detected.\n"); return -ENODEV; } if ((err = crypto_register_alg(&aesni_alg))) diff --git a/crypto/Kconfig b/crypto/Kconfig index 4dfdd03e708..26b5dd0cb56 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -23,11 +23,13 @@ comment "Crypto core or helper" config CRYPTO_FIPS bool "FIPS 200 compliance" + depends on CRYPTO_ANSI_CPRNG help This options enables the fips boot option which is required if you want to system to operate in a FIPS 200 certification. You should say no unless you know what - this is. + this is. Note that CRYPTO_ANSI_CPRNG is requred if this + option is selected config CRYPTO_ALGAPI tristate @@ -156,7 +158,7 @@ config CRYPTO_GCM tristate "GCM/GMAC support" select CRYPTO_CTR select CRYPTO_AEAD - select CRYPTO_GF128MUL + select CRYPTO_GHASH help Support for Galois/Counter Mode (GCM) and Galois Message Authentication Code (GMAC). Required for IPSec. @@ -267,6 +269,18 @@ config CRYPTO_XCBC http://csrc.nist.gov/encryption/modes/proposedmodes/ xcbc-mac/xcbc-mac-spec.pdf +config CRYPTO_VMAC + tristate "VMAC support" + depends on EXPERIMENTAL + select CRYPTO_HASH + select CRYPTO_MANAGER + help + VMAC is a message authentication algorithm designed for + very high speed on 64-bit architectures. 
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 4dfdd03e708..26b5dd0cb56 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -23,11 +23,13 @@ comment "Crypto core or helper"
 
 config CRYPTO_FIPS
 	bool "FIPS 200 compliance"
+	depends on CRYPTO_ANSI_CPRNG
 	help
 	  This options enables the fips boot option which is
 	  required if you want to system to operate in a FIPS 200
 	  certification. You should say no unless you know what
-	  this is.
+	  this is. Note that CRYPTO_ANSI_CPRNG is required if this
+	  option is selected.
 
 config CRYPTO_ALGAPI
 	tristate
@@ -156,7 +158,7 @@ config CRYPTO_GCM
 	tristate "GCM/GMAC support"
 	select CRYPTO_CTR
 	select CRYPTO_AEAD
-	select CRYPTO_GF128MUL
+	select CRYPTO_GHASH
 	help
 	  Support for Galois/Counter Mode (GCM) and Galois Message
 	  Authentication Code (GMAC). Required for IPSec.
@@ -267,6 +269,18 @@ config CRYPTO_XCBC
 	  http://csrc.nist.gov/encryption/modes/proposedmodes/
 	  xcbc-mac/xcbc-mac-spec.pdf
 
+config CRYPTO_VMAC
+	tristate "VMAC support"
+	depends on EXPERIMENTAL
+	select CRYPTO_HASH
+	select CRYPTO_MANAGER
+	help
+	  VMAC is a message authentication algorithm designed for
+	  very high speed on 64-bit architectures.
+
+	  See also:
+	  <http://fastcrypto.org/vmac>
+
 comment "Digest"
 
 config CRYPTO_CRC32C
@@ -289,6 +303,13 @@ config CRYPTO_CRC32C_INTEL
 	  gain performance compared with software implementation.
 	  Module will be crc32c-intel.
 
+config CRYPTO_GHASH
+	tristate "GHASH digest algorithm"
+	select CRYPTO_SHASH
+	select CRYPTO_GF128MUL
+	help
+	  GHASH is the message digest algorithm for GCM (Galois/Counter Mode).
+
 config CRYPTO_MD4
 	tristate "MD4 digest algorithm"
 	select CRYPTO_HASH
@@ -780,13 +801,14 @@ comment "Random Number Generation"
 
 config CRYPTO_ANSI_CPRNG
 	tristate "Pseudo Random Number Generation for Cryptographic modules"
+	default m
 	select CRYPTO_AES
 	select CRYPTO_RNG
-	select CRYPTO_FIPS
 	help
 	  This option enables the generic pseudo random number generator
 	  for cryptographic modules. Uses the Algorithm specified in
-	  ANSI X9.31 A.2.4
+	  ANSI X9.31 A.2.4. Note that this option must be enabled if
+	  CRYPTO_FIPS is selected.
 
 source "drivers/crypto/Kconfig"
diff --git a/crypto/Makefile b/crypto/Makefile
index 673d9f7c1bd..9e8f61908cb 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -3,7 +3,7 @@
 #
 
 obj-$(CONFIG_CRYPTO) += crypto.o
-crypto-objs := api.o cipher.o digest.o compress.o
+crypto-objs := api.o cipher.o compress.o
 
 obj-$(CONFIG_CRYPTO_WORKQUEUE) += crypto_wq.o
 
@@ -22,7 +22,6 @@ obj-$(CONFIG_CRYPTO_BLKCIPHER2) += chainiv.o
 obj-$(CONFIG_CRYPTO_BLKCIPHER2) += eseqiv.o
 obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
 
-crypto_hash-objs := hash.o
 crypto_hash-objs += ahash.o
 crypto_hash-objs += shash.o
 obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o
@@ -33,6 +32,7 @@ cryptomgr-objs := algboss.o testmgr.o
 obj-$(CONFIG_CRYPTO_MANAGER2) += cryptomgr.o
 
 obj-$(CONFIG_CRYPTO_HMAC) += hmac.o
+obj-$(CONFIG_CRYPTO_VMAC) += vmac.o
 obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o
 obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o
 obj-$(CONFIG_CRYPTO_MD4) += md4.o
@@ -83,6 +83,7 @@ obj-$(CONFIG_CRYPTO_RNG2) += rng.o
 obj-$(CONFIG_CRYPTO_RNG2) += krng.o
 obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o
 obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o
+obj-$(CONFIG_CRYPTO_GHASH) += ghash-generic.o
 
 #
 # generic algorithms and the async_tx api
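With CRYPTO_GHASH enabled, the new ghash-generic module (wired into the Makefile above) registers a keyed shash named "ghash" with a 16-byte key and 16-byte digest, which the GCM code now drives instead of using gf128mul directly. A minimal sketch of exercising it standalone; the function name is illustrative, and it assumes len is a multiple of the 16-byte block size, as GCM itself guarantees:

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

/* One-shot GHASH over 'len' bytes under the hash subkey 'h'. */
static int ghash_demo(const u8 h[16], const u8 *data, unsigned int len,
		      u8 out[16])
{
	struct crypto_shash *tfm = crypto_alloc_shash("ghash", 0, 0);
	struct shash_desc *desc;
	int err;

	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, h, 16);
	if (err)
		goto out_free_tfm;

	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		err = -ENOMEM;
		goto out_free_tfm;
	}
	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);
	kfree(desc);
out_free_tfm:
	crypto_free_shash(tfm);
	return err;
}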
+ "eseqiv" : skcipher_default_geniv; } static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask) @@ -201,8 +211,9 @@ static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask) int err; larval = crypto_larval_lookup(alg->cra_driver_name, + (type & ~CRYPTO_ALG_TYPE_MASK) | CRYPTO_ALG_TYPE_GIVCIPHER, - CRYPTO_ALG_TYPE_MASK); + mask | CRYPTO_ALG_TYPE_MASK); err = PTR_ERR(larval); if (IS_ERR(larval)) goto out; @@ -360,3 +371,17 @@ err: return ERR_PTR(err); } EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher); + +static int __init skcipher_module_init(void) +{ + skcipher_default_geniv = num_possible_cpus() > 1 ? + "eseqiv" : "chainiv"; + return 0; +} + +static void skcipher_module_exit(void) +{ +} + +module_init(skcipher_module_init); +module_exit(skcipher_module_exit); diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c index b8b66ec3883..e78b7ee44a7 100644 --- a/crypto/aes_generic.c +++ b/crypto/aes_generic.c @@ -1174,7 +1174,7 @@ EXPORT_SYMBOL_GPL(crypto_il_tab); ctx->key_enc[6 * i + 11] = t; \ } while (0) -#define loop8(i) do { \ +#define loop8tophalf(i) do { \ t = ror32(t, 8); \ t = ls_box(t) ^ rco_tab[i]; \ t ^= ctx->key_enc[8 * i]; \ @@ -1185,6 +1185,10 @@ EXPORT_SYMBOL_GPL(crypto_il_tab); ctx->key_enc[8 * i + 10] = t; \ t ^= ctx->key_enc[8 * i + 3]; \ ctx->key_enc[8 * i + 11] = t; \ +} while (0) + +#define loop8(i) do { \ + loop8tophalf(i); \ t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \ ctx->key_enc[8 * i + 12] = t; \ t ^= ctx->key_enc[8 * i + 5]; \ @@ -1245,8 +1249,9 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key, ctx->key_enc[5] = le32_to_cpu(key[5]); ctx->key_enc[6] = le32_to_cpu(key[6]); t = ctx->key_enc[7] = le32_to_cpu(key[7]); - for (i = 0; i < 7; ++i) + for (i = 0; i < 6; ++i) loop8(i); + loop8tophalf(i); break; } diff --git a/crypto/ahash.c b/crypto/ahash.c index f3476374f76..33a4ff45f84 100644 --- a/crypto/ahash.c +++ b/crypto/ahash.c @@ -24,6 +24,19 @@ #include "internal.h" +struct ahash_request_priv { + crypto_completion_t complete; + void *data; + u8 *result; + void *ubuf[] CRYPTO_MINALIGN_ATTR; +}; + +static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash) +{ + return container_of(crypto_hash_alg_common(hash), struct ahash_alg, + halg); +} + static int hash_walk_next(struct crypto_hash_walk *walk) { unsigned int alignmask = walk->alignmask; @@ -132,36 +145,34 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc, static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen) { - struct ahash_alg *ahash = crypto_ahash_alg(tfm); unsigned long alignmask = crypto_ahash_alignmask(tfm); int ret; u8 *buffer, *alignbuffer; unsigned long absize; absize = keylen + alignmask; - buffer = kmalloc(absize, GFP_ATOMIC); + buffer = kmalloc(absize, GFP_KERNEL); if (!buffer) return -ENOMEM; alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); memcpy(alignbuffer, key, keylen); - ret = ahash->setkey(tfm, alignbuffer, keylen); - memset(alignbuffer, 0, keylen); - kfree(buffer); + ret = tfm->setkey(tfm, alignbuffer, keylen); + kzfree(buffer); return ret; } -static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key, +int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen) { - struct ahash_alg *ahash = crypto_ahash_alg(tfm); unsigned long alignmask = crypto_ahash_alignmask(tfm); if ((unsigned long)key & alignmask) return ahash_setkey_unaligned(tfm, key, keylen); - return ahash->setkey(tfm, key, keylen); + return tfm->setkey(tfm, 
diff --git a/crypto/ahash.c b/crypto/ahash.c
index f3476374f76..33a4ff45f84 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -24,6 +24,19 @@
 
 #include "internal.h"
 
+struct ahash_request_priv {
+	crypto_completion_t complete;
+	void *data;
+	u8 *result;
+	void *ubuf[] CRYPTO_MINALIGN_ATTR;
+};
+
+static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
+{
+	return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
+			    halg);
+}
+
 static int hash_walk_next(struct crypto_hash_walk *walk)
 {
 	unsigned int alignmask = walk->alignmask;
@@ -132,36 +145,34 @@ int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
 static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
 				  unsigned int keylen)
 {
-	struct ahash_alg *ahash = crypto_ahash_alg(tfm);
 	unsigned long alignmask = crypto_ahash_alignmask(tfm);
 	int ret;
 	u8 *buffer, *alignbuffer;
 	unsigned long absize;
 
 	absize = keylen + alignmask;
-	buffer = kmalloc(absize, GFP_ATOMIC);
+	buffer = kmalloc(absize, GFP_KERNEL);
 	if (!buffer)
 		return -ENOMEM;
 
 	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 	memcpy(alignbuffer, key, keylen);
-	ret = ahash->setkey(tfm, alignbuffer, keylen);
-	memset(alignbuffer, 0, keylen);
-	kfree(buffer);
+	ret = tfm->setkey(tfm, alignbuffer, keylen);
+	kzfree(buffer);
 	return ret;
 }
 
-static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
+int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
 			unsigned int keylen)
 {
-	struct ahash_alg *ahash = crypto_ahash_alg(tfm);
 	unsigned long alignmask = crypto_ahash_alignmask(tfm);
 
 	if ((unsigned long)key & alignmask)
 		return ahash_setkey_unaligned(tfm, key, keylen);
 
-	return ahash->setkey(tfm, key, keylen);
+	return tfm->setkey(tfm, key, keylen);
 }
+EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
 
 static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
 			  unsigned int keylen)
@@ -169,44 +180,221 @@ static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
 	return -ENOSYS;
 }
 
-int crypto_ahash_import(struct ahash_request *req, const u8 *in)
+static inline unsigned int ahash_align_buffer_size(unsigned len,
+						   unsigned long mask)
+{
+	return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));
+}
+
+static void ahash_op_unaligned_finish(struct ahash_request *req, int err)
+{
+	struct ahash_request_priv *priv = req->priv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (!err)
+		memcpy(priv->result, req->result,
+		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
+
+	kzfree(priv);
+}
+
+static void ahash_op_unaligned_done(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	ahash_op_unaligned_finish(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_op_unaligned(struct ahash_request *req,
+			      int (*op)(struct ahash_request *))
 {
 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-	struct ahash_alg *alg = crypto_ahash_alg(tfm);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+	unsigned int ds = crypto_ahash_digestsize(tfm);
+	struct ahash_request_priv *priv;
+	int err;
+
+	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
+		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
+		       GFP_KERNEL : GFP_ATOMIC);
+	if (!priv)
+		return -ENOMEM;
 
-	memcpy(ahash_request_ctx(req), in, crypto_ahash_reqsize(tfm));
+	priv->result = req->result;
+	priv->complete = req->base.complete;
+	priv->data = req->base.data;
 
-	if (alg->reinit)
-		alg->reinit(req);
+	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
+	req->base.complete = ahash_op_unaligned_done;
+	req->base.data = req;
+	req->priv = priv;
 
-	return 0;
+	err = op(req);
+	ahash_op_unaligned_finish(req, err);
+
+	return err;
 }
-EXPORT_SYMBOL_GPL(crypto_ahash_import);
 
-static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type,
-					 u32 mask)
+static int crypto_ahash_op(struct ahash_request *req,
+			   int (*op)(struct ahash_request *))
 {
-	return alg->cra_ctxsize;
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+
+	if ((unsigned long)req->result & alignmask)
+		return ahash_op_unaligned(req, op);
+
+	return op(req);
 }
 
-static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
+int crypto_ahash_final(struct ahash_request *req)
 {
-	struct ahash_alg *alg = &tfm->__crt_alg->cra_ahash;
-	struct ahash_tfm *crt = &tfm->crt_ahash;
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_final);
 
-	if (alg->digestsize > PAGE_SIZE / 8)
-		return -EINVAL;
+int crypto_ahash_finup(struct ahash_request *req)
+{
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_finup);
+
+int crypto_ahash_digest(struct ahash_request *req)
+{
+	return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest);
+}
+EXPORT_SYMBOL_GPL(crypto_ahash_digest);
+
+static void ahash_def_finup_finish2(struct ahash_request *req, int err)
+{
+	struct ahash_request_priv *priv = req->priv;
+
+	if (err == -EINPROGRESS)
+		return;
+
+	if (!err)
+		memcpy(priv->result, req->result,
+		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));
 
-	crt->init = alg->init;
-	crt->update = alg->update;
-	crt->final = alg->final;
-	crt->digest = alg->digest;
-	crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey;
-	crt->digestsize = alg->digestsize;
+	kzfree(priv);
+}
+
+static void ahash_def_finup_done2(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	ahash_def_finup_finish2(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_def_finup_finish1(struct ahash_request *req, int err)
+{
+	if (err)
+		goto out;
+
+	req->base.complete = ahash_def_finup_done2;
+	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
+	err = crypto_ahash_reqtfm(req)->final(req);
+
+out:
+	ahash_def_finup_finish2(req, err);
+	return err;
+}
+
+static void ahash_def_finup_done1(struct crypto_async_request *req, int err)
+{
+	struct ahash_request *areq = req->data;
+	struct ahash_request_priv *priv = areq->priv;
+	crypto_completion_t complete = priv->complete;
+	void *data = priv->data;
+
+	err = ahash_def_finup_finish1(areq, err);
+
+	complete(data, err);
+}
+
+static int ahash_def_finup(struct ahash_request *req)
+{
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	unsigned long alignmask = crypto_ahash_alignmask(tfm);
+	unsigned int ds = crypto_ahash_digestsize(tfm);
+	struct ahash_request_priv *priv;
+
+	priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
+		       (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
+		       GFP_KERNEL : GFP_ATOMIC);
+	if (!priv)
+		return -ENOMEM;
+
+	priv->result = req->result;
+	priv->complete = req->base.complete;
+	priv->data = req->base.data;
+
+	req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
+	req->base.complete = ahash_def_finup_done1;
+	req->base.data = req;
+	req->priv = priv;
+
+	return ahash_def_finup_finish1(req, tfm->update(req));
+}
+
+static int ahash_no_export(struct ahash_request *req, void *out)
+{
+	return -ENOSYS;
+}
+
+static int ahash_no_import(struct ahash_request *req, const void *in)
+{
+	return -ENOSYS;
+}
+
+static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
+	struct ahash_alg *alg = crypto_ahash_alg(hash);
+
+	hash->setkey = ahash_nosetkey;
+	hash->export = ahash_no_export;
+	hash->import = ahash_no_import;
+
+	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
+		return crypto_init_shash_ops_async(tfm);
+
+	hash->init = alg->init;
+	hash->update = alg->update;
+	hash->final = alg->final;
+	hash->finup = alg->finup ?: ahash_def_finup;
+	hash->digest = alg->digest;
+
+	if (alg->setkey)
+		hash->setkey = alg->setkey;
+	if (alg->export)
+		hash->export = alg->export;
+	if (alg->import)
+		hash->import = alg->import;
 
 	return 0;
 }
 
+static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
+{
+	if (alg->cra_type == &crypto_ahash_type)
+		return alg->cra_ctxsize;
+
+	return sizeof(struct crypto_shash *);
+}
+
 static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
 	__attribute__ ((unused));