Switch from the old AES library functions (which use struct
crypto_aes_ctx) to the new ones (which use struct aes_enckey).  This
eliminates the unnecessary computation and caching of the decryption
round keys.  The new AES en/decryption functions are also much faster
and use AES instructions when supported by the CPU.

Note: aes_encrypt_new() will be renamed to aes_encrypt() once all
callers of the old aes_encrypt() have been updated.

Signed-off-by: Eric Biggers <[email protected]>
---
 arch/arm64/crypto/ghash-ce-glue.c | 29 ++++++++---------------------
 1 file changed, 8 insertions(+), 21 deletions(-)

diff --git a/arch/arm64/crypto/ghash-ce-glue.c b/arch/arm64/crypto/ghash-ce-glue.c
index ef249d06c92c..bfd38e485e77 100644
--- a/arch/arm64/crypto/ghash-ce-glue.c
+++ b/arch/arm64/crypto/ghash-ce-glue.c
@@ -38,11 +38,11 @@ struct ghash_key {
 struct arm_ghash_desc_ctx {
        u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
 };
 
 struct gcm_aes_ctx {
-       struct crypto_aes_ctx   aes_key;
+       struct aes_enckey       aes_key;
        u8                      nonce[RFC4106_NONCE_SIZE];
        struct ghash_key        ghash_key;
 };
 
 asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
@@ -184,35 +184,23 @@ static struct shash_alg ghash_alg = {
        .import                 = ghash_import,
        .descsize               = sizeof(struct arm_ghash_desc_ctx),
        .statesize              = sizeof(struct ghash_desc_ctx),
 };
 
-static int num_rounds(struct crypto_aes_ctx *ctx)
-{
-       /*
-        * # of rounds specified by AES:
-        * 128 bit key          10 rounds
-        * 192 bit key          12 rounds
-        * 256 bit key          14 rounds
-        * => n byte key        => 6 + (n/4) rounds
-        */
-       return 6 + ctx->key_length / 4;
-}
-
 static int gcm_aes_setkey(struct crypto_aead *tfm, const u8 *inkey,
                          unsigned int keylen)
 {
        struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
        u8 key[GHASH_BLOCK_SIZE];
        be128 h;
        int ret;
 
-       ret = aes_expandkey(&ctx->aes_key, inkey, keylen);
+       ret = aes_prepareenckey(&ctx->aes_key, inkey, keylen);
        if (ret)
                return -EINVAL;
 
-       aes_encrypt(&ctx->aes_key, key, (u8[AES_BLOCK_SIZE]){});
+       aes_encrypt_new(&ctx->aes_key, key, (u8[AES_BLOCK_SIZE]){});
 
        /* needed for the fallback */
        memcpy(&ctx->ghash_key.k, key, GHASH_BLOCK_SIZE);
 
        ghash_reflect(ctx->ghash_key.h[0], &ctx->ghash_key.k);
@@ -294,11 +282,10 @@ static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[], u32 len)
 
 static int gcm_encrypt(struct aead_request *req, char *iv, int assoclen)
 {
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
-       int nrounds = num_rounds(&ctx->aes_key);
        struct skcipher_walk walk;
        u8 buf[AES_BLOCK_SIZE];
        u64 dg[2] = {};
        be128 lengths;
        u8 *tag;
@@ -329,12 +316,12 @@ static int gcm_encrypt(struct aead_request *req, char *iv, int assoclen)
                        tag = NULL;
                }
 
                scoped_ksimd()
                        pmull_gcm_encrypt(nbytes, dst, src, ctx->ghash_key.h,
-                                         dg, iv, ctx->aes_key.key_enc, nrounds,
-                                         tag);
+                                         dg, iv, ctx->aes_key.k.rndkeys,
+                                         ctx->aes_key.nrounds, tag);
 
                if (unlikely(!nbytes))
                        break;
 
                if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))
@@ -357,11 +344,10 @@ static int gcm_encrypt(struct aead_request *req, char 
*iv, int assoclen)
 static int gcm_decrypt(struct aead_request *req, char *iv, int assoclen)
 {
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
        unsigned int authsize = crypto_aead_authsize(aead);
-       int nrounds = num_rounds(&ctx->aes_key);
        struct skcipher_walk walk;
        u8 otag[AES_BLOCK_SIZE];
        u8 buf[AES_BLOCK_SIZE];
        u64 dg[2] = {};
        be128 lengths;
@@ -399,12 +385,13 @@ static int gcm_decrypt(struct aead_request *req, char *iv, int assoclen)
                }
 
                scoped_ksimd()
                        ret = pmull_gcm_decrypt(nbytes, dst, src,
                                                ctx->ghash_key.h,
-                                               dg, iv, ctx->aes_key.key_enc,
-                                               nrounds, tag, otag, authsize);
+                                               dg, iv, ctx->aes_key.k.rndkeys,
+                                               ctx->aes_key.nrounds, tag, otag,
+                                               authsize);
 
                if (unlikely(!nbytes))
                        break;
 
                if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))
-- 
2.52.0


Reply via email to