Hi,

> --- a/drivers/crypto/fsl/fsl_blob.c
> +++ b/drivers/crypto/fsl/fsl_blob.c
> @@ -1,6 +1,7 @@
>  // SPDX-License-Identifier: GPL-2.0+
>  /*
>   * Copyright 2014 Freescale Semiconductor, Inc.
> + * Copyright 2021 NXP
>   *
>   */
>  
> @@ -152,6 +153,87 @@ int blob_encap(u8 *key_mod, u8 *src, u8 *dst, u32 len)
>       return ret;
>  }
>  
> +int derive_blob_kek(u8 *bkek_buf, u8 *key_mod, u32 key_sz)

Where is this function actually used? It looks like dead code to me.

> +{
> +     int ret, size;
> +     u32 *desc;
> +
> +     if (!IS_ALIGNED((uintptr_t)bkek_buf, ARCH_DMA_MINALIGN) ||
> +         !IS_ALIGNED((uintptr_t)key_mod, ARCH_DMA_MINALIGN)) {
> +             puts("Error: derive_bkek: Address arguments are not aligned!\n");
> +             return -EINVAL;
> +     }
> +
> +     printf("\nBlob key encryption key(bkek)\n");
> +     desc = malloc_cache_aligned(sizeof(int) * MAX_CAAM_DESCSIZE);
> +     if (!desc) {
> +             printf("Not enough memory for descriptor allocation\n");
> +             return -ENOMEM;
> +     }
> +
> +     size = ALIGN(key_sz, ARCH_DMA_MINALIGN);
> +     flush_dcache_range((unsigned long)key_mod, (unsigned long)key_mod + size);
> +
> +     /* construct blob key encryption key(bkek) derive descriptor */
> +     inline_cnstr_jobdesc_derive_bkek(desc, bkek_buf, key_mod, key_sz);
> +
> +     size = ALIGN(sizeof(int) * MAX_CAAM_DESCSIZE, ARCH_DMA_MINALIGN);
> +     flush_dcache_range((unsigned long)desc, (unsigned long)desc + size);
> +     size = ALIGN(BKEK_SIZE, ARCH_DMA_MINALIGN);
> +     invalidate_dcache_range((unsigned long)bkek_buf,
> +                             (unsigned long)bkek_buf + size);
> +
> +     /* run descriptor */
> +     ret = run_descriptor_jr(desc);
> +     if (ret < 0) {
> +             printf("Error: %s failed 0x%x\n", __func__, ret);
> +     } else {
> +             invalidate_dcache_range((unsigned long)bkek_buf,
> +                                     (unsigned long)bkek_buf + size);
> +             puts("derive bkek successful.\n");
> +     }
> +
> +     free(desc);
> +     return ret;
> +}
> +
> +int hwrng_generate(u8 *dst, u32 len)

Likewise.
But more importantly, how does this differ from drivers/crypto/fsl/rng.c? Why
do you need a new function here?

> +{
> +     int ret, size;
> +     u32 *desc;
> +
> +     if (!IS_ALIGNED((uintptr_t)dst, ARCH_DMA_MINALIGN)) {
> +             puts("Error: caam_hwrng_test: Address arguments are not aligned!\n");
> +             return -EINVAL;
> +     }
> +
> +     printf("\nRNG generate\n");
> +     desc = malloc_cache_aligned(sizeof(int) * MAX_CAAM_DESCSIZE);
> +     if (!desc) {
> +             printf("Not enough memory for descriptor allocation\n");
> +             return -ENOMEM;
> +     }
> +
> +     inline_cnstr_jobdesc_rng(desc, dst, len);
> +
> +     size = ALIGN(sizeof(int) * MAX_CAAM_DESCSIZE, ARCH_DMA_MINALIGN);
> +     flush_dcache_range((unsigned long)desc, (unsigned long)desc + size);
> +     size = ALIGN(len, ARCH_DMA_MINALIGN);
> +     invalidate_dcache_range((unsigned long)dst, (unsigned long)dst + size);
> +
> +     ret = run_descriptor_jr(desc);
> +     if (ret < 0) {
> +             printf("Error: RNG generate failed 0x%x\n", ret);
> +     } else {
> +             invalidate_dcache_range((unsigned long)dst,
> +                                     (unsigned long)dst + size);
> +             puts("RNG generation successful.\n");
> +     }
> +
> +     free(desc);
> +     return ret;
> +}
> +
>  #ifdef CONFIG_CMD_DEKBLOB
>  int blob_dek(const u8 *src, u8 *dst, u8 len)
>  {

-michael

Reply via email to