On Wed, Oct 22, 2014 at 09:15:32AM +0200, Ard Biesheuvel wrote:
This patch implements the AES key schedule generation using ARMv8
Crypto Instructions. It replaces the table based C implementation
in aes_generic.ko, which means we can drop the dependency on that
module.
Signed-off-by: Ard Biesheuvel ard.biesheu...@linaro.org
I've given this a test on Juno running 3.18-rc2.
I disabled CONFIG_CRYPTO_MANAGER_DISABLE_TESTS, and now /proc/crypto
indicates self-test results of passed.
Also, I ran the tcrypt test module with the following parameters:
modprobe tcrypt mode=10
modprobe tcrypt mode=37
modprobe tcrypt mode=45
No failures were reported in dmesg.
For extra fun I applied dynamic ftrace probes to ce_aes_expandkey and
ce_aes_setkey; and for each test I ran, at least one of those probes
fired.
So for this patch:
Tested-by: Steve Capper steve.cap...@linaro.org
The patch looks reasonable to me (apart from some checkpatch warnings
regarding trailing whitespace), so if it helps things:
Acked-by: Steve Capper steve.cap...@linaro.org
---
arch/arm64/crypto/Kconfig | 5 +-
arch/arm64/crypto/aes-ce-ccm-glue.c | 4 +-
arch/arm64/crypto/aes-ce-cipher.c | 112
+++-
arch/arm64/crypto/aes-ce-setkey.h | 5 ++
arch/arm64/crypto/aes-glue.c| 18 --
5 files changed, 133 insertions(+), 11 deletions(-)
create mode 100644 arch/arm64/crypto/aes-ce-setkey.h
diff --git a/arch/arm64/crypto/Kconfig b/arch/arm64/crypto/Kconfig
index 5562652c5316..a38b02ce5f9a 100644
--- a/arch/arm64/crypto/Kconfig
+++ b/arch/arm64/crypto/Kconfig
@@ -27,20 +27,19 @@ config CRYPTO_AES_ARM64_CE
tristate "AES core cipher using ARMv8 Crypto Extensions"
depends on ARM64 && KERNEL_MODE_NEON
select CRYPTO_ALGAPI
- select CRYPTO_AES
config CRYPTO_AES_ARM64_CE_CCM
tristate "AES in CCM mode using ARMv8 Crypto Extensions"
depends on ARM64 && KERNEL_MODE_NEON
select CRYPTO_ALGAPI
- select CRYPTO_AES
+ select CRYPTO_AES_ARM64_CE
select CRYPTO_AEAD
config CRYPTO_AES_ARM64_CE_BLK
tristate "AES in ECB/CBC/CTR/XTS modes using ARMv8 Crypto Extensions"
depends on ARM64 && KERNEL_MODE_NEON
select CRYPTO_BLKCIPHER
- select CRYPTO_AES
+ select CRYPTO_AES_ARM64_CE
select CRYPTO_ABLK_HELPER
config CRYPTO_AES_ARM64_NEON_BLK
diff --git a/arch/arm64/crypto/aes-ce-ccm-glue.c
b/arch/arm64/crypto/aes-ce-ccm-glue.c
index 9e6cdde9b43d..0ac73b838fa3 100644
--- a/arch/arm64/crypto/aes-ce-ccm-glue.c
+++ b/arch/arm64/crypto/aes-ce-ccm-glue.c
@@ -16,6 +16,8 @@
#include <linux/crypto.h>
#include <linux/module.h>
+#include "aes-ce-setkey.h"
+
static int num_rounds(struct crypto_aes_ctx *ctx)
{
/*
@@ -48,7 +50,7 @@ static int ccm_setkey(struct crypto_aead *tfm, const u8
*in_key,
struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
int ret;
- ret = crypto_aes_expand_key(ctx, in_key, key_len);
+ ret = ce_aes_expandkey(ctx, in_key, key_len);
if (!ret)
return 0;
diff --git a/arch/arm64/crypto/aes-ce-cipher.c
b/arch/arm64/crypto/aes-ce-cipher.c
index 2075e1acae6b..4207c83389d3 100644
--- a/arch/arm64/crypto/aes-ce-cipher.c
+++ b/arch/arm64/crypto/aes-ce-cipher.c
@@ -14,6 +14,8 @@
#include <linux/crypto.h>
#include <linux/module.h>
+#include "aes-ce-setkey.h"
+
MODULE_DESCRIPTION("Synchronous AES cipher using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
@@ -124,6 +126,114 @@ static void aes_cipher_decrypt(struct crypto_tfm *tfm,
u8 dst[], u8 const src[])
kernel_neon_end();
}
+/*
+ * aes_sub() - use the aese instruction to perform the AES sbox substitution
+ * on each byte in 'input'
+ */
+static u32 aes_sub(u32 input)
+{
+ u32 ret;
+
+ __asm__("dup v1.4s, %w[in] ;"
+ "movi v0.16b, #0 ;"
+ "aese v0.16b, v1.16b ;"
+ "umov %w[out], v0.4s[0] ;"
+
+ : [out] "=r"(ret)
+ : [in] "r"(input)
+ : "v0","v1");
+
+ return ret;
+}
I like this use of named arguments in the inline asm.
+
+int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
+ unsigned int key_len)
+{
+ /*
+ * The AES key schedule round constants
+ */
+ static u8 const rcon[] = {
+ 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
+ };
+
+ u32 kwords = key_len / sizeof(u32);
+ struct aes_block *key_enc, *key_dec;
+ int i, j;
+
+ if (key_len != AES_KEYSIZE_128 &&
+ key_len != AES_KEYSIZE_192 &&
+ key_len != AES_KEYSIZE_256)
+ return -EINVAL;
+
+ memcpy(ctx->key_enc, in_key, key_len);
+ ctx->key_length = key_len;
+
+ kernel_neon_begin_partial(2);
+ for (i = 0; i < sizeof(rcon); i