From: Andrei Botila <andrei.bot...@nxp.com>

Newer CAAM versions (Era 9+) support 16B IVs. Since the HW limitation is
no longer present on these devices, requests containing 16B IVs should be
processed directly in hardware, without using the fallback.

Signed-off-by: Andrei Botila <andrei.bot...@nxp.com>
---
 drivers/crypto/caam/caamalg.c      | 12 ++++++++----
 drivers/crypto/caam/caamalg_desc.c | 27 ++++++++++++++++-----------
 2 files changed, 24 insertions(+), 15 deletions(-)
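
Annotation (for review, not part of the patch): on the descriptor side the
FIFO skip of the upper IV half is replaced with a second context load, so
the full 16B tweak reaches the class 1 context. Schematically, in both the
encap and decap shared descriptors:

        /* before: only IV[0..7] is kept, IV[8..15] is discarded */
        append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                        (0x20 << LDST_OFFSET_SHIFT));
        append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

        /*
         * after: IV[0..7] -> class 1 context offset 0x20 (sector index /
         * tweak lower half), IV[8..15] -> offset 0x30 (tweak upper half);
         * a matching pair of 8B seq stores writes the tweak back after
         * the operation
         */
        append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                        (0x20 << LDST_OFFSET_SHIFT));
        append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                        (0x30 << LDST_OFFSET_SHIFT));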

diff --git a/drivers/crypto/caam/caamalg.c b/drivers/crypto/caam/caamalg.c
index e72aa3e2e065..cf5bd7666dfc 100644
--- a/drivers/crypto/caam/caamalg.c
+++ b/drivers/crypto/caam/caamalg.c
@@ -834,6 +834,7 @@ static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
 {
        struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
        struct device *jrdev = ctx->jrdev;
+       struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
        u32 *desc;
        int err;
 
@@ -846,9 +847,11 @@ static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
        if (keylen != 2 * AES_KEYSIZE_128 && keylen != 2 * AES_KEYSIZE_256)
                ctx->xts_key_fallback = true;
 
-       err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
-       if (err)
-               return err;
+       if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) {
+               err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
+               if (err)
+                       return err;
+       }
 
        ctx->cdata.keylen = keylen;
        ctx->cdata.key_virt = key;
@@ -1784,6 +1787,7 @@ static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
        struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
        struct device *jrdev = ctx->jrdev;
        struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
+       struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
        u32 *desc;
        int ret = 0;
 
@@ -1795,7 +1799,7 @@ static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
        if (!req->cryptlen && !ctx->fallback)
                return 0;
 
-       if (ctx->fallback && (xts_skcipher_ivsize(req) ||
+       if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
                              ctx->xts_key_fallback)) {
                struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
 
diff --git a/drivers/crypto/caam/caamalg_desc.c b/drivers/crypto/caam/caamalg_desc.c
index f0f0fdd1ef32..7571e1ac913b 100644
--- a/drivers/crypto/caam/caamalg_desc.c
+++ b/drivers/crypto/caam/caamalg_desc.c
@@ -1551,13 +1551,14 @@ void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
        set_jump_tgt_here(desc, key_jump_cmd);
 
        /*
-        * create sequence for loading the sector index
-        * Upper 8B of IV - will be used as sector index
-        * Lower 8B of IV - will be discarded
+        * create sequence for loading the sector index / 16B tweak value
+        * Lower 8B of IV - sector index / tweak lower half
+        * Upper 8B of IV - upper half of 16B tweak
         */
        append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                        (0x20 << LDST_OFFSET_SHIFT));
-       append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
+       append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+                       (0x30 << LDST_OFFSET_SHIFT));
 
        /* Load operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
@@ -1566,9 +1567,11 @@ void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
        /* Perform operation */
        skcipher_append_src_dst(desc);
 
-       /* Store upper 8B of IV */
+       /* Store lower 8B and upper 8B of IV */
        append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                         (0x20 << LDST_OFFSET_SHIFT));
+       append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+                        (0x30 << LDST_OFFSET_SHIFT));
 
        print_hex_dump_debug("xts skcipher enc shdesc@" __stringify(__LINE__)
                             ": ", DUMP_PREFIX_ADDRESS, 16, 4,
@@ -1610,23 +1613,25 @@ void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
        set_jump_tgt_here(desc, key_jump_cmd);
 
        /*
-        * create sequence for loading the sector index
-        * Upper 8B of IV - will be used as sector index
-        * Lower 8B of IV - will be discarded
+        * create sequence for loading the sector index / 16B tweak value
+        * Lower 8B of IV - sector index / tweak lower half
+        * Upper 8B of IV - upper half of 16B tweak
         */
        append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                        (0x20 << LDST_OFFSET_SHIFT));
-       append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
-
+       append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+                       (0x30 << LDST_OFFSET_SHIFT));
        /* Load operation */
        append_dec_op1(desc, cdata->algtype);
 
        /* Perform operation */
        skcipher_append_src_dst(desc);
 
-       /* Store upper 8B of IV */
+       /* Store lower 8B and upper 8B of IV */
        append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                         (0x20 << LDST_OFFSET_SHIFT));
+       append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+                        (0x30 << LDST_OFFSET_SHIFT));
 
        print_hex_dump_debug("xts skcipher dec shdesc@" __stringify(__LINE__)
                             ": ", DUMP_PREFIX_ADDRESS, 16, 4, desc,
-- 
2.17.1
