author     Linus Torvalds <torvalds@linux-foundation.org>  2012-05-23 15:59:10 -0700
committer  Linus Torvalds <torvalds@linux-foundation.org>  2012-05-23 15:59:10 -0700
commit     0bd3fbd4abeafa19ae0302d25194468b022d1a56 (patch)
tree       1fc34b25666c97b85dfb7199e48b2e074ffde264 /arch
parent     0b87da68a0f0a7bf7f7446cf64f92e672bd68ef8 (diff)
parent     ef45b834319f8a18f257a40ba4bce6b829ef1708 (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
 - New cipher/hash driver for ARM ux500.
 - Code clean-up for aesni-intel.
 - Misc fixes.

Fixed up conflicts in arch/arm/mach-ux500/devices-common.h, where quite
frankly some of it made no sense at all (the pull brought in a
declaration for the dbx500_add_platform_device_noirq() function, which
neither exists nor is used anywhere).

Also some trivial add-add context conflicts in the Kconfig file in
drivers/{char/hw_random,crypto}/

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
  crypto: aesni-intel - move more common code to ablk_init_common
  crypto: aesni-intel - use crypto_[un]register_algs
  crypto: ux500 - Cleanup hardware identification
  crypto: ux500 - Update DMA handling for 3.4
  mach-ux500: crypto - core support for CRYP/HASH module.
  crypto: ux500 - Add driver for HASH hardware
  crypto: ux500 - Add driver for CRYP hardware
  hwrng: Kconfig - modify default state for atmel-rng driver
  hwrng: omap - use devm_request_and_ioremap
  crypto: crypto4xx - move up err_request_irq label
  crypto, xor: Sanitize checksumming function selection output
  crypto: caam - add backward compatible string sec4.0
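For readers skimming the aesni-intel part of this pull: the "use crypto_[un]register_algs" patch replaces a long chain of individual crypto_register_alg() calls, each with its own goto-based error unwinding, with a single array of struct crypto_alg registered in one crypto_register_algs() call (the actual array is visible in the aesni-intel_glue.c hunk further down). A minimal userspace sketch of that table-driven registration pattern follows; struct alg, register_one(), unregister_one() and register_all() are hypothetical stand-ins for illustration, not kernel API.

#include <stdio.h>
#include <stddef.h>

/* Stand-in for an algorithm descriptor; the real struct crypto_alg
 * carries many more fields (names, sizes, ops, ...). */
struct alg {
	const char *name;
};

/* Hypothetical stand-ins for crypto_register_alg()/crypto_unregister_alg();
 * here they only print what they would do. */
static int register_one(const struct alg *a)
{
	printf("register %s\n", a->name);
	return 0;
}

static void unregister_one(const struct alg *a)
{
	printf("unregister %s\n", a->name);
}

/* Table-driven registration: on failure, unwind everything registered so
 * far, which is the behaviour crypto_register_algs() provides internally
 * and which previously had to be open-coded with a ladder of labels. */
static int register_all(const struct alg *algs, size_t count)
{
	size_t i;
	int err = 0;

	for (i = 0; i < count; i++) {
		err = register_one(&algs[i]);
		if (err)
			goto unwind;
	}
	return 0;

unwind:
	while (i-- > 0)
		unregister_one(&algs[i]);
	return err;
}

int main(void)
{
	const struct alg algs[] = {
		{ "aes-aesni" }, { "ecb-aes-aesni" }, { "cbc-aes-aesni" },
	};

	return register_all(algs, sizeof(algs) / sizeof(algs[0]));
}

The kernel-side analogue in this merge is aesni_init() shrinking to a single crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs)) call, and aesni_exit() to the matching crypto_unregister_algs(), as shown in the final hunks of the diff.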
Diffstat (limited to 'arch')
-rw-r--r--  arch/arm/mach-ux500/board-mop500.c                 48
-rw-r--r--  arch/arm/mach-ux500/clock.c                        18
-rw-r--r--  arch/arm/mach-ux500/devices-common.h               50
-rw-r--r--  arch/arm/mach-ux500/devices-db8500.c                3
-rw-r--r--  arch/arm/mach-ux500/devices-db8500.h                4
-rw-r--r--  arch/arm/mach-ux500/include/mach/crypto-ux500.h    22
-rw-r--r--  arch/arm/mach-ux500/include/mach/devices.h          3
-rw-r--r--  arch/arm/mach-ux500/include/mach/hardware.h         3
-rw-r--r--  arch/x86/crypto/aesni-intel_glue.c                793
9 files changed, 461 insertions, 483 deletions
diff --git a/arch/arm/mach-ux500/board-mop500.c b/arch/arm/mach-ux500/board-mop500.c
index 4bc0cbc5f07..f943687acaf 100644
--- a/arch/arm/mach-ux500/board-mop500.c
+++ b/arch/arm/mach-ux500/board-mop500.c
@@ -47,6 +47,7 @@
#include <mach/setup.h>
#include <mach/devices.h>
#include <mach/irqs.h>
+#include <mach/crypto-ux500.h>
#include "ste-dma40-db8500.h"
#include "devices-db8500.h"
@@ -417,6 +418,45 @@ static void mop500_prox_deactivate(struct device *dev)
regulator_put(prox_regulator);
}
+static struct cryp_platform_data u8500_cryp1_platform_data = {
+ .mem_to_engine = {
+ .dir = STEDMA40_MEM_TO_PERIPH,
+ .src_dev_type = STEDMA40_DEV_SRC_MEMORY,
+ .dst_dev_type = DB8500_DMA_DEV48_CAC1_TX,
+ .src_info.data_width = STEDMA40_WORD_WIDTH,
+ .dst_info.data_width = STEDMA40_WORD_WIDTH,
+ .mode = STEDMA40_MODE_LOGICAL,
+ .src_info.psize = STEDMA40_PSIZE_LOG_4,
+ .dst_info.psize = STEDMA40_PSIZE_LOG_4,
+ },
+ .engine_to_mem = {
+ .dir = STEDMA40_PERIPH_TO_MEM,
+ .src_dev_type = DB8500_DMA_DEV48_CAC1_RX,
+ .dst_dev_type = STEDMA40_DEV_DST_MEMORY,
+ .src_info.data_width = STEDMA40_WORD_WIDTH,
+ .dst_info.data_width = STEDMA40_WORD_WIDTH,
+ .mode = STEDMA40_MODE_LOGICAL,
+ .src_info.psize = STEDMA40_PSIZE_LOG_4,
+ .dst_info.psize = STEDMA40_PSIZE_LOG_4,
+ }
+};
+
+static struct stedma40_chan_cfg u8500_hash_dma_cfg_tx = {
+ .dir = STEDMA40_MEM_TO_PERIPH,
+ .src_dev_type = STEDMA40_DEV_SRC_MEMORY,
+ .dst_dev_type = DB8500_DMA_DEV50_HAC1_TX,
+ .src_info.data_width = STEDMA40_WORD_WIDTH,
+ .dst_info.data_width = STEDMA40_WORD_WIDTH,
+ .mode = STEDMA40_MODE_LOGICAL,
+ .src_info.psize = STEDMA40_PSIZE_LOG_16,
+ .dst_info.psize = STEDMA40_PSIZE_LOG_16,
+};
+
+static struct hash_platform_data u8500_hash1_platform_data = {
+ .mem_to_engine = &u8500_hash_dma_cfg_tx,
+ .dma_filter = stedma40_filter,
+};
+
/* add any platform devices here - TODO */
static struct platform_device *mop500_platform_devs[] __initdata = {
&mop500_gpio_keys_device,
@@ -624,6 +664,12 @@ static void __init mop500_uart_init(struct device *parent)
db8500_add_uart2(parent, &uart2_plat);
}
+static void __init u8500_cryp1_hash1_init(struct device *parent)
+{
+ db8500_add_cryp1(parent, &u8500_cryp1_platform_data);
+ db8500_add_hash1(parent, &u8500_hash1_platform_data);
+}
+
static struct platform_device *snowball_platform_devs[] __initdata = {
&snowball_led_dev,
&snowball_key_dev,
@@ -654,6 +700,8 @@ static void __init mop500_init_machine(void)
mop500_msp_init(parent);
mop500_uart_init(parent);
+ u8500_cryp1_hash1_init(parent);
+
i2c0_devs = ARRAY_SIZE(mop500_i2c0_devices);
i2c_register_board_info(0, mop500_i2c0_devices, i2c0_devs);
diff --git a/arch/arm/mach-ux500/clock.c b/arch/arm/mach-ux500/clock.c
index a121cb472dd..1762c4728f1 100644
--- a/arch/arm/mach-ux500/clock.c
+++ b/arch/arm/mach-ux500/clock.c
@@ -381,14 +381,15 @@ static DEFINE_PRCC_CLK(5, usb, 0, 0, NULL);
/* Peripheral Cluster #6 */
/* MTU ID in data */
-static DEFINE_PRCC_CLK_CUSTOM(6, mtu1, 8, -1, NULL, clk_mtu_get_rate, 1);
-static DEFINE_PRCC_CLK_CUSTOM(6, mtu0, 7, -1, NULL, clk_mtu_get_rate, 0);
-static DEFINE_PRCC_CLK(6, cfgreg, 6, 6, NULL);
-static DEFINE_PRCC_CLK(6, hash1, 5, -1, NULL);
-static DEFINE_PRCC_CLK(6, unipro, 4, 1, &clk_uniproclk);
-static DEFINE_PRCC_CLK(6, pka, 3, -1, NULL);
-static DEFINE_PRCC_CLK(6, hash0, 2, -1, NULL);
-static DEFINE_PRCC_CLK(6, cryp0, 1, -1, NULL);
+static DEFINE_PRCC_CLK_CUSTOM(6, mtu1, 9, -1, NULL, clk_mtu_get_rate, 1);
+static DEFINE_PRCC_CLK_CUSTOM(6, mtu0, 8, -1, NULL, clk_mtu_get_rate, 0);
+static DEFINE_PRCC_CLK(6, cfgreg, 7, 7, NULL);
+static DEFINE_PRCC_CLK(6, hash1, 6, -1, NULL);
+static DEFINE_PRCC_CLK(6, unipro, 5, 1, &clk_uniproclk);
+static DEFINE_PRCC_CLK(6, pka, 4, -1, NULL);
+static DEFINE_PRCC_CLK(6, hash0, 3, -1, NULL);
+static DEFINE_PRCC_CLK(6, cryp0, 2, -1, NULL);
+static DEFINE_PRCC_CLK(6, cryp1, 1, -1, NULL);
static DEFINE_PRCC_CLK(6, rng, 0, 0, &clk_rngclk);
static struct clk clk_dummy_apb_pclk = {
@@ -430,6 +431,7 @@ static struct clk_lookup u8500_clks[] = {
CLK(pka, "pka", NULL),
CLK(hash0, "hash0", NULL),
CLK(cryp0, "cryp0", NULL),
+ CLK(cryp1, "cryp1", NULL),
/* PRCMU level clock gating */
diff --git a/arch/arm/mach-ux500/devices-common.h b/arch/arm/mach-ux500/devices-common.h
index 7cbccfd9e15..6e470656026 100644
--- a/arch/arm/mach-ux500/devices-common.h
+++ b/arch/arm/mach-ux500/devices-common.h
@@ -13,6 +13,7 @@
#include <linux/sys_soc.h>
#include <linux/amba/bus.h>
#include <plat/i2c.h>
+#include <mach/crypto-ux500.h>
struct spi_master_cntlr;
@@ -85,6 +86,55 @@ dbx500_add_rtc(struct device *parent, resource_size_t base, int irq)
0, NULL, 0);
}
+struct cryp_platform_data;
+
+static inline struct platform_device *
+dbx500_add_cryp1(struct device *parent, int id, resource_size_t base, int irq,
+ struct cryp_platform_data *pdata)
+{
+ struct resource res[] = {
+ DEFINE_RES_MEM(base, SZ_4K),
+ DEFINE_RES_IRQ(irq),
+ };
+
+ struct platform_device_info pdevinfo = {
+ .parent = parent,
+ .name = "cryp1",
+ .id = id,
+ .res = res,
+ .num_res = ARRAY_SIZE(res),
+ .data = pdata,
+ .size_data = sizeof(*pdata),
+ .dma_mask = DMA_BIT_MASK(32),
+ };
+
+ return platform_device_register_full(&pdevinfo);
+}
+
+struct hash_platform_data;
+
+static inline struct platform_device *
+dbx500_add_hash1(struct device *parent, int id, resource_size_t base,
+ struct hash_platform_data *pdata)
+{
+ struct resource res[] = {
+ DEFINE_RES_MEM(base, SZ_4K),
+ };
+
+ struct platform_device_info pdevinfo = {
+ .parent = parent,
+ .name = "hash1",
+ .id = id,
+ .res = res,
+ .num_res = ARRAY_SIZE(res),
+ .data = pdata,
+ .size_data = sizeof(*pdata),
+ .dma_mask = DMA_BIT_MASK(32),
+ };
+
+ return platform_device_register_full(&pdevinfo);
+}
+
struct nmk_gpio_platform_data;
void dbx500_add_gpios(struct device *parent, resource_size_t *base, int num,
diff --git a/arch/arm/mach-ux500/devices-db8500.c b/arch/arm/mach-ux500/devices-db8500.c
index 6e66d3777ed..91754a8a0d4 100644
--- a/arch/arm/mach-ux500/devices-db8500.c
+++ b/arch/arm/mach-ux500/devices-db8500.c
@@ -104,6 +104,8 @@ static const dma_addr_t dma40_tx_map[DB8500_DMA_NR_DEV] = {
[DB8500_DMA_DEV14_MSP2_TX] = U8500_MSP2_BASE + MSP_TX_RX_REG_OFFSET,
[DB8500_DMA_DEV30_MSP1_TX] = U8500_MSP1_BASE + MSP_TX_RX_REG_OFFSET,
[DB8500_DMA_DEV31_MSP0_TX_SLIM0_CH0_TX] = U8500_MSP0_BASE + MSP_TX_RX_REG_OFFSET,
+ [DB8500_DMA_DEV48_CAC1_TX] = U8500_CRYP1_BASE + CRYP1_TX_REG_OFFSET,
+ [DB8500_DMA_DEV50_HAC1_TX] = U8500_HASH1_BASE + HASH1_TX_REG_OFFSET,
};
/* Mapping between source event lines and physical device address */
@@ -139,6 +141,7 @@ static const dma_addr_t dma40_rx_map[DB8500_DMA_NR_DEV] = {
[DB8500_DMA_DEV14_MSP2_RX] = U8500_MSP2_BASE + MSP_TX_RX_REG_OFFSET,
[DB8500_DMA_DEV30_MSP3_RX] = U8500_MSP3_BASE + MSP_TX_RX_REG_OFFSET,
[DB8500_DMA_DEV31_MSP0_RX_SLIM0_CH0_RX] = U8500_MSP0_BASE + MSP_TX_RX_REG_OFFSET,
+ [DB8500_DMA_DEV48_CAC1_RX] = U8500_CRYP1_BASE + CRYP1_RX_REG_OFFSET,
};
/* Reserved event lines for memcpy only */
diff --git a/arch/arm/mach-ux500/devices-db8500.h b/arch/arm/mach-ux500/devices-db8500.h
index 0b9677a95bb..3c8010f4fb3 100644
--- a/arch/arm/mach-ux500/devices-db8500.h
+++ b/arch/arm/mach-ux500/devices-db8500.h
@@ -114,4 +114,8 @@ db8500_add_ssp(struct device *parent, const char *name, resource_size_t base,
dbx500_add_uart(parent, "uart2", U8500_UART2_BASE, \
IRQ_DB8500_UART2, pdata)
+#define db8500_add_cryp1(parent, pdata) \
+ dbx500_add_cryp1(parent, -1, U8500_CRYP1_BASE, IRQ_DB8500_CRYP1, pdata)
+#define db8500_add_hash1(parent, pdata) \
+ dbx500_add_hash1(parent, -1, U8500_HASH1_BASE, pdata)
#endif
diff --git a/arch/arm/mach-ux500/include/mach/crypto-ux500.h b/arch/arm/mach-ux500/include/mach/crypto-ux500.h
new file mode 100644
index 00000000000..5b2d0817e26
--- /dev/null
+++ b/arch/arm/mach-ux500/include/mach/crypto-ux500.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) ST-Ericsson SA 2011
+ *
+ * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson
+ * License terms: GNU General Public License (GPL) version 2
+ */
+#ifndef _CRYPTO_UX500_H
+#define _CRYPTO_UX500_H
+#include <linux/dmaengine.h>
+#include <plat/ste_dma40.h>
+
+struct hash_platform_data {
+ void *mem_to_engine;
+ bool (*dma_filter)(struct dma_chan *chan, void *filter_param);
+};
+
+struct cryp_platform_data {
+ struct stedma40_chan_cfg mem_to_engine;
+ struct stedma40_chan_cfg engine_to_mem;
+};
+
+#endif
diff --git a/arch/arm/mach-ux500/include/mach/devices.h b/arch/arm/mach-ux500/include/mach/devices.h
index 9b5eb69a015..cbc6f1e4104 100644
--- a/arch/arm/mach-ux500/include/mach/devices.h
+++ b/arch/arm/mach-ux500/include/mach/devices.h
@@ -14,6 +14,9 @@ extern struct platform_device u8500_gpio_devs[];
extern struct amba_device ux500_pl031_device;
+extern struct platform_device ux500_hash1_device;
+extern struct platform_device ux500_cryp1_device;
+
extern struct platform_device u8500_dma40_device;
extern struct platform_device ux500_ske_keypad_device;
diff --git a/arch/arm/mach-ux500/include/mach/hardware.h b/arch/arm/mach-ux500/include/mach/hardware.h
index 808c1d6601c..28d16e744bf 100644
--- a/arch/arm/mach-ux500/include/mach/hardware.h
+++ b/arch/arm/mach-ux500/include/mach/hardware.h
@@ -33,6 +33,9 @@
#include <mach/db8500-regs.h>
#define MSP_TX_RX_REG_OFFSET 0
+#define CRYP1_RX_REG_OFFSET 0x10
+#define CRYP1_TX_REG_OFFSET 0x8
+#define HASH1_TX_REG_OFFSET 0x4
#ifndef __ASSEMBLY__
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index c799352e24f..ac7f5cd019e 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -222,27 +222,6 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
}
}
-static struct crypto_alg aesni_alg = {
- .cra_name = "aes",
- .cra_driver_name = "aes-aesni",
- .cra_priority = 300,
- .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
- .cra_alignmask = 0,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(aesni_alg.cra_list),
- .cra_u = {
- .cipher = {
- .cia_min_keysize = AES_MIN_KEY_SIZE,
- .cia_max_keysize = AES_MAX_KEY_SIZE,
- .cia_setkey = aes_set_key,
- .cia_encrypt = aes_encrypt,
- .cia_decrypt = aes_decrypt
- }
- }
-};
-
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
@@ -257,27 +236,6 @@ static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
aesni_dec(ctx, dst, src);
}
-static struct crypto_alg __aesni_alg = {
- .cra_name = "__aes-aesni",
- .cra_driver_name = "__driver-aes-aesni",
- .cra_priority = 0,
- .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
- .cra_alignmask = 0,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(__aesni_alg.cra_list),
- .cra_u = {
- .cipher = {
- .cia_min_keysize = AES_MIN_KEY_SIZE,
- .cia_max_keysize = AES_MAX_KEY_SIZE,
- .cia_setkey = aes_set_key,
- .cia_encrypt = __aes_encrypt,
- .cia_decrypt = __aes_decrypt
- }
- }
-};
-
static int ecb_encrypt(struct blkcipher_desc *desc,
struct scatterlist *dst, struct scatterlist *src,
unsigned int nbytes)
@@ -326,28 +284,6 @@ static int ecb_decrypt(struct blkcipher_desc *desc,
return err;
}
-static struct crypto_alg blk_ecb_alg = {
- .cra_name = "__ecb-aes-aesni",
- .cra_driver_name = "__driver-ecb-aes-aesni",
- .cra_priority = 0,
- .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
- .cra_alignmask = 0,
- .cra_type = &crypto_blkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
- .cra_u = {
- .blkcipher = {
- .min_keysize = AES_MIN_KEY_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE,
- .setkey = aes_set_key,
- .encrypt = ecb_encrypt,
- .decrypt = ecb_decrypt,
- },
- },
-};
-
static int cbc_encrypt(struct blkcipher_desc *desc,
struct scatterlist *dst, struct scatterlist *src,
unsigned int nbytes)
@@ -396,28 +332,6 @@ static int cbc_decrypt(struct blkcipher_desc *desc,
return err;
}
-static struct crypto_alg blk_cbc_alg = {
- .cra_name = "__cbc-aes-aesni",
- .cra_driver_name = "__driver-cbc-aes-aesni",
- .cra_priority = 0,
- .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
- .cra_alignmask = 0,
- .cra_type = &crypto_blkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
- .cra_u = {
- .blkcipher = {
- .min_keysize = AES_MIN_KEY_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE,
- .setkey = aes_set_key,
- .encrypt = cbc_encrypt,
- .decrypt = cbc_decrypt,
- },
- },
-};
-
#ifdef CONFIG_X86_64
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
struct blkcipher_walk *walk)
@@ -461,29 +375,6 @@ static int ctr_crypt(struct blkcipher_desc *desc,
return err;
}
-
-static struct crypto_alg blk_ctr_alg = {
- .cra_name = "__ctr-aes-aesni",
- .cra_driver_name = "__driver-ctr-aes-aesni",
- .cra_priority = 0,
- .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
- .cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
- .cra_alignmask = 0,
- .cra_type = &crypto_blkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(blk_ctr_alg.cra_list),
- .cra_u = {
- .blkcipher = {
- .min_keysize = AES_MIN_KEY_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE,
- .ivsize = AES_BLOCK_SIZE,
- .setkey = aes_set_key,
- .encrypt = ctr_crypt,
- .decrypt = ctr_crypt,
- },
- },
-};
#endif
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
@@ -551,281 +442,65 @@ static void ablk_exit(struct crypto_tfm *tfm)
cryptd_free_ablkcipher(ctx->cryptd_tfm);
}
-static void ablk_init_common(struct crypto_tfm *tfm,
- struct cryptd_ablkcipher *cryptd_tfm)
+static int ablk_init_common(struct crypto_tfm *tfm, const char *drv_name)
{
struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct cryptd_ablkcipher *cryptd_tfm;
+
+ cryptd_tfm = cryptd_alloc_ablkcipher(drv_name, 0, 0);
+ if (IS_ERR(cryptd_tfm))
+ return PTR_ERR(cryptd_tfm);
ctx->cryptd_tfm = cryptd_tfm;
tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
crypto_ablkcipher_reqsize(&cryptd_tfm->base);
+
+ return 0;
}
static int ablk_ecb_init(struct crypto_tfm *tfm)
{
- struct cryptd_ablkcipher *cryptd_tfm;
-
- cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
- if (IS_ERR(cryptd_tfm))
- return PTR_ERR(cryptd_tfm);
- ablk_init_common(tfm, cryptd_tfm);
- return 0;
+ return ablk_init_common(tfm, "__driver-ecb-aes-aesni");
}
-static struct crypto_alg ablk_ecb_alg = {
- .cra_name = "ecb(aes)",
- .cra_driver_name = "ecb-aes-aesni",
- .cra_priority = 400,
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct async_aes_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_ablkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
- .cra_init = ablk_ecb_init,
- .cra_exit = ablk_exit,
- .cra_u = {
- .ablkcipher = {
- .min_keysize = AES_MIN_KEY_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE,
- .setkey = ablk_set_key,
- .encrypt = ablk_encrypt,
- .decrypt = ablk_decrypt,
- },
- },
-};
-
static int ablk_cbc_init(struct crypto_tfm *tfm)
{
- struct cryptd_ablkcipher *cryptd_tfm;
-
- cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
- if (IS_ERR(cryptd_tfm))
- return PTR_ERR(cryptd_tfm);
- ablk_init_common(tfm, cryptd_tfm);
- return 0;
+ return ablk_init_common(tfm, "__driver-cbc-aes-aesni");
}
-static struct crypto_alg ablk_cbc_alg = {
- .cra_name = "cbc(aes)",
- .cra_driver_name = "cbc-aes-aesni",
- .cra_priority = 400,
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct async_aes_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_ablkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
- .cra_init = ablk_cbc_init,
- .cra_exit = ablk_exit,
- .cra_u = {
- .ablkcipher = {
- .min_keysize = AES_MIN_KEY_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE,
- .ivsize = AES_BLOCK_SIZE,
- .setkey = ablk_set_key,
- .encrypt = ablk_encrypt,
- .decrypt = ablk_decrypt,
- },
- },
-};
-
#ifdef CONFIG_X86_64
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
- struct cryptd_ablkcipher *cryptd_tfm;
-
- cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ctr-aes-aesni", 0, 0);
- if (IS_ERR(cryptd_tfm))
- return PTR_ERR(cryptd_tfm);
- ablk_init_common(tfm, cryptd_tfm);
- return 0;
+ return ablk_init_common(tfm, "__driver-ctr-aes-aesni");
}
-static struct crypto_alg ablk_ctr_alg = {
- .cra_name = "ctr(aes)",
- .cra_driver_name = "ctr-aes-aesni",
- .cra_priority = 400,
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
- .cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct async_aes_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_ablkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
- .cra_init = ablk_ctr_init,
- .cra_exit = ablk_exit,
- .cra_u = {
- .ablkcipher = {
- .min_keysize = AES_MIN_KEY_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE,
- .ivsize = AES_BLOCK_SIZE,
- .setkey = ablk_set_key,
- .encrypt = ablk_encrypt,
- .decrypt = ablk_encrypt,
- .geniv = "chainiv",
- },
- },
-};
-
#ifdef HAS_CTR
static int ablk_rfc3686_ctr_init(struct crypto_tfm *tfm)
{
- struct cryptd_ablkcipher *cryptd_tfm;
-
- cryptd_tfm = cryptd_alloc_ablkcipher(
- "rfc3686(__driver-ctr-aes-aesni)", 0, 0);
- if (IS_ERR(cryptd_tfm))
- return PTR_ERR(cryptd_tfm);
- ablk_init_common(tfm, cryptd_tfm);
- return 0;
+ return ablk_init_common(tfm, "rfc3686(__driver-ctr-aes-aesni)");
}
-
-static struct crypto_alg ablk_rfc3686_ctr_alg = {
- .cra_name = "rfc3686(ctr(aes))",
- .cra_driver_name = "rfc3686-ctr-aes-aesni",
- .cra_priority = 400,
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
- .cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct async_aes_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_ablkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(ablk_rfc3686_ctr_alg.cra_list),
- .cra_init = ablk_rfc3686_ctr_init,
- .cra_exit = ablk_exit,
- .cra_u = {
- .ablkcipher = {
- .min_keysize = AES_MIN_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
- .ivsize = CTR_RFC3686_IV_SIZE,
- .setkey = ablk_set_key,
- .encrypt = ablk_encrypt,
- .decrypt = ablk_decrypt,
- .geniv = "seqiv",
- },
- },
-};
#endif
#endif
#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
- struct cryptd_ablkcipher *cryptd_tfm;
-
- cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
- 0, 0);
- if (IS_ERR(cryptd_tfm))
- return PTR_ERR(cryptd_tfm);
- ablk_init_common(tfm, cryptd_tfm);
- return 0;
+ return ablk_init_common(tfm, "fpu(lrw(__driver-aes-aesni))");
}
-
-static struct crypto_alg ablk_lrw_alg = {
- .cra_name = "lrw(aes)",
- .cra_driver_name = "lrw-aes-aesni",
- .cra_priority = 400,
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct async_aes_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_ablkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
- .cra_init = ablk_lrw_init,
- .cra_exit = ablk_exit,
- .cra_u = {
- .ablkcipher = {
- .min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
- .ivsize = AES_BLOCK_SIZE,
- .setkey = ablk_set_key,
- .encrypt = ablk_encrypt,
- .decrypt = ablk_decrypt,
- },
- },
-};
#endif
#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
- struct cryptd_ablkcipher *cryptd_tfm;
-
- cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
- 0, 0);
- if (IS_ERR(cryptd_tfm))
- return PTR_ERR(cryptd_tfm);
- ablk_init_common(tfm, cryptd_tfm);
- return 0;
+ return ablk_init_common(tfm, "fpu(pcbc(__driver-aes-aesni))");
}
-
-static struct crypto_alg ablk_pcbc_alg = {
- .cra_name = "pcbc(aes)",
- .cra_driver_name = "pcbc-aes-aesni",
- .cra_priority = 400,
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct async_aes_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_ablkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
- .cra_init = ablk_pcbc_init,
- .cra_exit = ablk_exit,
- .cra_u = {
- .ablkcipher = {
- .min_keysize = AES_MIN_KEY_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE,
- .ivsize = AES_BLOCK_SIZE,
- .setkey = ablk_set_key,
- .encrypt = ablk_encrypt,
- .decrypt = ablk_decrypt,
- },
- },
-};
#endif
#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
- struct cryptd_ablkcipher *cryptd_tfm;
-
- cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
- 0, 0);
- if (IS_ERR(cryptd_tfm))
- return PTR_ERR(cryptd_tfm);
- ablk_init_common(tfm, cryptd_tfm);
- return 0;
+ return ablk_init_common(tfm, "fpu(xts(__driver-aes-aesni))");
}
-
-static struct crypto_alg ablk_xts_alg = {
- .cra_name = "xts(aes)",
- .cra_driver_name = "xts-aes-aesni",
- .cra_priority = 400,
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
- .cra_blocksize = AES_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct async_aes_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_ablkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(ablk_xts_alg.cra_list),
- .cra_init = ablk_xts_init,
- .cra_exit = ablk_exit,
- .cra_u = {
- .ablkcipher = {
- .min_keysize = 2 * AES_MIN_KEY_SIZE,
- .max_keysize = 2 * AES_MAX_KEY_SIZE,
- .ivsize = AES_BLOCK_SIZE,
- .setkey = ablk_set_key,
- .encrypt = ablk_encrypt,
- .decrypt = ablk_decrypt,
- },
- },
-};
#endif
#ifdef CONFIG_X86_64
@@ -1050,32 +725,6 @@ static int rfc4106_decrypt(struct aead_request *req)
}
}
-static struct crypto_alg rfc4106_alg = {
- .cra_name = "rfc4106(gcm(aes))",
- .cra_driver_name = "rfc4106-gcm-aesni",
- .cra_priority = 400,
- .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
- .cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) + AESNI_ALIGN,
- .cra_alignmask = 0,
- .cra_type = &crypto_nivaead_type,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(rfc4106_alg.cra_list),
- .cra_init = rfc4106_init,
- .cra_exit = rfc4106_exit,
- .cra_u = {
- .aead = {
- .setkey = rfc4106_set_key,
- .setauthsize = rfc4106_set_authsize,
- .encrypt = rfc4106_encrypt,
- .decrypt = rfc4106_decrypt,
- .geniv = "seqiv",
- .ivsize = 8,
- .maxauthsize = 16,
- },
- },
-};
-
static int __driver_rfc4106_encrypt(struct aead_request *req)
{
u8 one_entry_in_sg = 0;
@@ -1233,26 +882,316 @@ static int __driver_rfc4106_decrypt(struct aead_request *req)
}
return retval;
}
+#endif
-static struct crypto_alg __rfc4106_alg = {
+static struct crypto_alg aesni_algs[] = { {
+ .cra_name = "aes",
+ .cra_driver_name = "aes-aesni",
+ .cra_priority = 300,
+ .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct crypto_aes_ctx) +
+ AESNI_ALIGN - 1,
+ .cra_alignmask = 0,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
+ .cipher = {
+ .cia_min_keysize = AES_MIN_KEY_SIZE,
+ .cia_max_keysize = AES_MAX_KEY_SIZE,
+ .cia_setkey = aes_set_key,
+ .cia_encrypt = aes_encrypt,
+ .cia_decrypt = aes_decrypt
+ }
+ }
+}, {
+ .cra_name = "__aes-aesni",
+ .cra_driver_name = "__driver-aes-aesni",
+ .cra_priority = 0,
+ .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct crypto_aes_ctx) +
+ AESNI_ALIGN - 1,
+ .cra_alignmask = 0,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
+ .cipher = {
+ .cia_min_keysize = AES_MIN_KEY_SIZE,
+ .cia_max_keysize = AES_MAX_KEY_SIZE,
+ .cia_setkey = aes_set_key,
+ .cia_encrypt = __aes_encrypt,
+ .cia_decrypt = __aes_decrypt
+ }
+ }
+}, {
+ .cra_name = "__ecb-aes-aesni",
+ .cra_driver_name = "__driver-ecb-aes-aesni",
+ .cra_priority = 0,
+ .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct crypto_aes_ctx) +
+ AESNI_ALIGN - 1,
+ .cra_alignmask = 0,
+ .cra_type = &crypto_blkcipher_type,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
+ .blkcipher = {
+ .min_keysize = AES_MIN_KEY_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE,
+ .setkey = aes_set_key,
+ .encrypt = ecb_encrypt,
+ .decrypt = ecb_decrypt,
+ },
+ },
+}, {
+ .cra_name = "__cbc-aes-aesni",
+ .cra_driver_name = "__driver-cbc-aes-aesni",
+ .cra_priority = 0,
+ .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct crypto_aes_ctx) +
+ AESNI_ALIGN - 1,
+ .cra_alignmask = 0,
+ .cra_type = &crypto_blkcipher_type,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
+ .blkcipher = {
+ .min_keysize = AES_MIN_KEY_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE,
+ .setkey = aes_set_key,
+ .encrypt = cbc_encrypt,
+ .decrypt = cbc_decrypt,
+ },
+ },
+}, {
+ .cra_name = "ecb(aes)",
+ .cra_driver_name = "ecb-aes-aesni",
+ .cra_priority = 400,
+ .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct async_aes_ctx),
+ .cra_alignmask = 0,
+ .cra_type = &crypto_ablkcipher_type,
+ .cra_module = THIS_MODULE,
+ .cra_init = ablk_ecb_init,
+ .cra_exit = ablk_exit,
+ .cra_u = {
+ .ablkcipher = {
+ .min_keysize = AES_MIN_KEY_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE,
+ .setkey = ablk_set_key,
+ .encrypt = ablk_encrypt,
+ .decrypt = ablk_decrypt,
+ },
+ },
+}, {
+ .cra_name = "cbc(aes)",
+ .cra_driver_name = "cbc-aes-aesni",
+ .cra_priority = 400,
+ .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct async_aes_ctx),
+ .cra_alignmask = 0,
+ .cra_type = &crypto_ablkcipher_type,
+ .cra_module = THIS_MODULE,
+ .cra_init = ablk_cbc_init,
+ .cra_exit = ablk_exit,
+ .cra_u = {
+ .ablkcipher = {
+ .min_keysize = AES_MIN_KEY_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE,
+ .ivsize = AES_BLOCK_SIZE,
+ .setkey = ablk_set_key,
+ .encrypt = ablk_encrypt,
+ .decrypt = ablk_decrypt,
+ },
+ },
+#ifdef CONFIG_X86_64
+}, {
+ .cra_name = "__ctr-aes-aesni",
+ .cra_driver_name = "__driver-ctr-aes-aesni",
+ .cra_priority = 0,
+ .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct crypto_aes_ctx) +
+ AESNI_ALIGN - 1,
+ .cra_alignmask = 0,
+ .cra_type = &crypto_blkcipher_type,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
+ .blkcipher = {
+ .min_keysize = AES_MIN_KEY_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE,
+ .ivsize = AES_BLOCK_SIZE,
+ .setkey = aes_set_key,
+ .encrypt = ctr_crypt,
+ .decrypt = ctr_crypt,
+ },
+ },
+}, {
+ .cra_name = "ctr(aes)",
+ .cra_driver_name = "ctr-aes-aesni",
+ .cra_priority = 400,
+ .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct async_aes_ctx),
+ .cra_alignmask = 0,
+ .cra_type = &crypto_ablkcipher_type,
+ .cra_module = THIS_MODULE,
+ .cra_init = ablk_ctr_init,
+ .cra_exit = ablk_exit,
+ .cra_u = {
+ .ablkcipher = {
+ .min_keysize = AES_MIN_KEY_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE,
+ .ivsize = AES_BLOCK_SIZE,
+ .setkey = ablk_set_key,
+ .encrypt = ablk_encrypt,
+ .decrypt = ablk_encrypt,
+ .geniv = "chainiv",
+ },
+ },
+}, {
.cra_name = "__gcm-aes-aesni",
.cra_driver_name = "__driver-gcm-aes-aesni",
.cra_priority = 0,
.cra_flags = CRYPTO_ALG_TYPE_AEAD,
.cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) + AESNI_ALIGN,
+ .cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) +
+ AESNI_ALIGN,
.cra_alignmask = 0,
.cra_type = &crypto_aead_type,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(__rfc4106_alg.cra_list),
.cra_u = {
.aead = {
.encrypt = __driver_rfc4106_encrypt,
.decrypt = __driver_rfc4106_decrypt,
},
},
-};
+}, {
+ .cra_name = "rfc4106(gcm(aes))",
+ .cra_driver_name = "rfc4106-gcm-aesni",
+ .cra_priority = 400,
+ .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) +
+ AESNI_ALIGN,
+ .cra_alignmask = 0,
+ .cra_type = &crypto_nivaead_type,
+ .cra_module = THIS_MODULE,
+ .cra_init = rfc4106_init,
+ .cra_exit = rfc4106_exit,
+ .cra_u = {
+ .aead = {
+ .setkey = rfc4106_set_key,
+ .setauthsize = rfc4106_set_authsize,
+ .encrypt = rfc4106_encrypt,
+ .decrypt = rfc4106_decrypt,
+ .geniv = "seqiv",
+ .ivsize = 8,
+ .maxauthsize = 16,
+ },
+ },
+#ifdef HAS_CTR
+}, {
+ .cra_name = "rfc3686(ctr(aes))",
+ .cra_driver_name = "rfc3686-ctr-aes-aesni",
+ .cra_priority = 400,
+ .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+ .cra_blocksize = 1,
+ .cra_ctxsize = sizeof(struct async_aes_ctx),
+ .cra_alignmask = 0,
+ .cra_type = &crypto_ablkcipher_type,
+ .cra_module = THIS_MODULE,
+ .cra_init = ablk_rfc3686_ctr_init,
+ .cra_exit = ablk_exit,
+ .cra_u = {
+ .ablkcipher = {
+ .min_keysize = AES_MIN_KEY_SIZE +
+ CTR_RFC3686_NONCE_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE +
+ CTR_RFC3686_NONCE_SIZE,
+ .ivsize = CTR_RFC3686_IV_SIZE,
+ .setkey = ablk_set_key,
+ .encrypt = ablk_encrypt,
+ .decrypt = ablk_decrypt,
+ .geniv = "seqiv",
+ },
+ },
+#endif
+#endif
+#ifdef HAS_LRW
+}, {
+ .cra_name = "lrw(aes)",
+ .cra_driver_name = "lrw-aes-aesni",
+ .cra_priority = 400,
+ .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct async_aes_ctx),
+ .cra_alignmask = 0,
+ .cra_type = &crypto_ablkcipher_type,
+ .cra_module = THIS_MODULE,
+ .cra_init = ablk_lrw_init,
+ .cra_exit = ablk_exit,
+ .cra_u = {
+ .ablkcipher = {
+ .min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
+ .ivsize = AES_BLOCK_SIZE,
+ .setkey = ablk_set_key,
+ .encrypt = ablk_encrypt,
+ .decrypt = ablk_decrypt,
+ },
+ },
+#endif
+#ifdef HAS_PCBC
+}, {
+ .cra_name = "pcbc(aes)",
+ .cra_driver_name = "pcbc-aes-aesni",
+ .cra_priority = 400,
+ .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct async_aes_ctx),
+ .cra_alignmask = 0,
+ .cra_type = &crypto_ablkcipher_type,
+ .cra_module = THIS_MODULE,
+ .cra_init = ablk_pcbc_init,
+ .cra_exit = ablk_exit,
+ .cra_u = {
+ .ablkcipher = {
+ .min_keysize = AES_MIN_KEY_SIZE,
+ .max_keysize = AES_MAX_KEY_SIZE,
+ .ivsize = AES_BLOCK_SIZE,
+ .setkey = ablk_set_key,
+ .encrypt = ablk_encrypt,
+ .decrypt = ablk_decrypt,
+ },
+ },
#endif
+#ifdef HAS_XTS
+}, {
+ .cra_name = "xts(aes)",
+ .cra_driver_name = "xts-aes-aesni",
+ .cra_priority = 400,
+ .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
+ .cra_blocksize = AES_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct async_aes_ctx),
+ .cra_alignmask = 0,
+ .cra_type = &crypto_ablkcipher_type,
+ .cra_module = THIS_MODULE,
+ .cra_init = ablk_xts_init,
+ .cra_exit = ablk_exit,
+ .cra_u = {
+ .ablkcipher = {
+ .min_keysize = 2 * AES_MIN_KEY_SIZE,
+ .max_keysize = 2 * AES_MAX_KEY_SIZE,
+ .ivsize = AES_BLOCK_SIZE,
+ .setkey = ablk_set_key,
+ .encrypt = ablk_encrypt,
+ .decrypt = ablk_decrypt,
+ },
+ },
+#endif
+} };
static const struct x86_cpu_id aesni_cpu_id[] = {
@@ -1263,120 +1202,24 @@ MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
static int __init aesni_init(void)
{
- int err;
+ int err, i;
if (!x86_match_cpu(aesni_cpu_id))
return -ENODEV;
- if ((err = crypto_fpu_init()))
- goto fpu_err;
- if ((err = crypto_register_alg(&aesni_alg)))
- goto aes_err;
- if ((err = crypto_register_alg(&__aesni_alg)))
- goto __aes_err;
- if ((err = crypto_register_alg(&blk_ecb_alg)))
- goto blk_ecb_err;
- if ((err = crypto_register_alg(&blk_cbc_alg)))
- goto blk_cbc_err;
- if ((err = crypto_register_alg(&ablk_ecb_alg)))
- goto ablk_ecb_err;
- if ((err = crypto_register_alg(&ablk_cbc_alg)))
- goto ablk_cbc_err;
-#ifdef CONFIG_X86_64
- if ((err = crypto_register_alg(&blk_ctr_alg)))
- goto blk_ctr_err;
- if ((err = crypto_register_alg(&ablk_ctr_alg)))
- goto ablk_ctr_err;
- if ((err = crypto_register_alg(&__rfc4106_alg)))
- goto __aead_gcm_err;
- if ((err = crypto_register_alg(&rfc4106_alg)))
- goto aead_gcm_err;
-#ifdef HAS_CTR
- if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg)))
- goto ablk_rfc3686_ctr_err;
-#endif
-#endif
-#ifdef HAS_LRW
- if ((err = crypto_register_alg(&ablk_lrw_alg)))
- goto ablk_lrw_err;
-#endif
-#ifdef HAS_PCBC
- if ((err = crypto_register_alg(&ablk_pcbc_alg)))
- goto ablk_pcbc_err;
-#endif
-#ifdef HAS_XTS
- if ((err = crypto_register_alg(&ablk_xts_alg)))
- goto ablk_xts_err;
-#endif
- return err;
+ err = crypto_fpu_init();
+ if (err)
+ return err;
-#ifdef HAS_XTS
-ablk_xts_err:
-#endif
-#ifdef HAS_PCBC
- crypto_unregister_alg(&ablk_pcbc_alg);
-ablk_pcbc_err:
-#endif
-#ifdef HAS_LRW
- crypto_unregister_alg(&ablk_lrw_alg);
-ablk_lrw_err:
-#endif
-#ifdef CONFIG_X86_64
-#ifdef HAS_CTR
- crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
-ablk_rfc3686_ctr_err:
-#endif
- crypto_unregister_alg(&rfc4106_alg);
-aead_gcm_err:
- crypto_unregister_alg(&__rfc4106_alg);
-__aead_gcm_err:
- crypto_unregister_alg(&ablk_ctr_alg);
-ablk_ctr_err:
- crypto_unregister_alg(&blk_ctr_alg);
-blk_ctr_err:
-#endif
- crypto_unregister_alg(&ablk_cbc_alg);
-ablk_cbc_err:
- crypto_unregister_alg(&ablk_ecb_alg);
-ablk_ecb_err:
- crypto_unregister_alg(&blk_cbc_alg);
-blk_cbc_err:
- crypto_unregister_alg(&blk_ecb_alg);
-blk_ecb_err:
- crypto_unregister_alg(&__aesni_alg);
-__aes_err:
- crypto_unregister_alg(&aesni_alg);
-aes_err:
-fpu_err:
- return err;
+ for (i = 0; i < ARRAY_SIZE(aesni_algs); i++)
+ INIT_LIST_HEAD(&aesni_algs[i].cra_list);
+
+ return crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
}
static void __exit aesni_exit(void)
{
-#ifdef HAS_XTS
- crypto_unregister_alg(&ablk_xts_alg);
-#endif
-#ifdef HAS_PCBC
- crypto_unregister_alg(&ablk_pcbc_alg);
-#endif
-#ifdef HAS_LRW
- crypto_unregister_alg(&ablk_lrw_alg);
-#endif
-#ifdef CONFIG_X86_64
-#ifdef HAS_CTR
- crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
-#endif
- crypto_unregister_alg(&rfc4106_alg);
- crypto_unregister_alg(&__rfc4106_alg);
- crypto_unregister_alg(&ablk_ctr_alg);
- crypto_unregister_alg(&blk_ctr_alg);
-#endif
- crypto_unregister_alg(&ablk_cbc_alg);
- crypto_unregister_alg(&ablk_ecb_alg);
- crypto_unregister_alg(&blk_cbc_alg);
- crypto_unregister_alg(&blk_ecb_alg);
- crypto_unregister_alg(&__aesni_alg);
- crypto_unregister_alg(&aesni_alg);
+ crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
crypto_fpu_exit();
}