crypto: caam/jr - add fallback for XTS with more than 8B IV

A hardware limitation exists for CAAM until Era 9 which restricts
the accelerator to IVs of only 8 bytes. When CAAM has a lower era,
a fallback is necessary to process 16-byte IVs.

Fixes: c6415a6016 ("crypto: caam - add support for acipher xts(aes)")
Cc: <stable@vger.kernel.org> # v4.4+
Signed-off-by: Andrei Botila <andrei.botila@nxp.com>
Reviewed-by: Horia Geantă <horia.geanta@nxp.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
commit 9d9b14dbe0 (parent 17f86c5b6b)
Author: Andrei Botila <andrei.botila@nxp.com>, 2020-09-22 19:03:19 +03:00
Committer: Herbert Xu
2 changed files with 66 additions and 7 deletions
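For context: CAAM units before Era 9 only consume the low 8 bytes of the 16-byte XTS IV
(the sector index), so this patch routes any request whose upper 8 IV bytes are non-zero
to a software fallback cipher. Below is a minimal user-space sketch of that decision, not
driver code; the helper name xts_needs_fallback and the test vectors are made up for
illustration and only mirror the xts_skcipher_ivsize() check added in caamalg.c.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define XTS_IV_SIZE 16

/* Hypothetical helper mirroring xts_skcipher_ivsize(): returns true when the
 * upper 64 bits of the XTS IV are non-zero, i.e. the sector index does not fit
 * in the 8 bytes a pre-Era-9 CAAM can handle and the request must be handed to
 * the software fallback. */
static bool xts_needs_fallback(const uint8_t iv[XTS_IV_SIZE])
{
	uint64_t hi;

	memcpy(&hi, iv + XTS_IV_SIZE / 2, sizeof(hi)); /* bytes 8..15 of the IV */
	return hi != 0;
}

int main(void)
{
	uint8_t iv_small[XTS_IV_SIZE] = { 0x01 }; /* sector index fits in 8 bytes */
	uint8_t iv_large[XTS_IV_SIZE] = { 0 };

	iv_large[8] = 0x01;                       /* non-zero upper half */

	printf("small IV -> %s\n", xts_needs_fallback(iv_small) ? "fallback" : "CAAM");
	printf("large IV -> %s\n", xts_needs_fallback(iv_large) ? "fallback" : "CAAM");
	return 0;
}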

drivers/crypto/caam/Kconfig

@@ -101,6 +101,7 @@ config CRYPTO_DEV_FSL_CAAM_CRYPTO_API
 	select CRYPTO_AUTHENC
 	select CRYPTO_SKCIPHER
 	select CRYPTO_LIB_DES
+	select CRYPTO_XTS
 	help
 	  Selecting this will offload crypto for users of the
 	  scatterlist crypto API (such as the linux native IPSec

drivers/crypto/caam/caamalg.c

@@ -57,6 +57,7 @@
 #include "key_gen.h"
 #include "caamalg_desc.h"
 #include <crypto/engine.h>
+#include <asm/unaligned.h>
 
 /*
  * crypto alg
@@ -114,10 +115,12 @@ struct caam_ctx {
 	struct alginfo adata;
 	struct alginfo cdata;
 	unsigned int authsize;
+	struct crypto_skcipher *fallback;
 };
 
 struct caam_skcipher_req_ctx {
 	struct skcipher_edesc *edesc;
+	struct skcipher_request fallback_req;
 };
 
 struct caam_aead_req_ctx {
@@ -830,12 +833,17 @@ static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
 	struct device *jrdev = ctx->jrdev;
 	u32 *desc;
+	int err;
 
 	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
 		dev_dbg(jrdev, "key size mismatch\n");
 		return -EINVAL;
 	}
 
+	err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
+	if (err)
+		return err;
+
 	ctx->cdata.keylen = keylen;
 	ctx->cdata.key_virt = key;
 	ctx->cdata.key_inline = true;
@@ -1755,6 +1763,14 @@ static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
 	return ret;
 }
 
+static inline bool xts_skcipher_ivsize(struct skcipher_request *req)
+{
+	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
+	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
+
+	return !!get_unaligned((u64 *)(req->iv + (ivsize / 2)));
+}
+
 static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
 {
 	struct skcipher_edesc *edesc;
@@ -1768,6 +1784,21 @@ static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
 	if (!req->cryptlen)
 		return 0;
 
+	if (ctx->fallback && xts_skcipher_ivsize(req)) {
+		struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
+
+		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
+		skcipher_request_set_callback(&rctx->fallback_req,
+					      req->base.flags,
+					      req->base.complete,
+					      req->base.data);
+		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
+					   req->dst, req->cryptlen, req->iv);
+
+		return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
+				 crypto_skcipher_decrypt(&rctx->fallback_req);
+	}
+
 	/* allocate extended descriptor */
 	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
 	if (IS_ERR(edesc))
@@ -1905,6 +1936,7 @@ static struct caam_skcipher_alg driver_algs[] = {
 		.base = {
 			.cra_name = "xts(aes)",
 			.cra_driver_name = "xts-aes-caam",
+			.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
 			.cra_blocksize = AES_BLOCK_SIZE,
 		},
 		.setkey = xts_skcipher_setkey,
@@ -3344,13 +3376,35 @@ static int caam_cra_init(struct crypto_skcipher *tfm)
 	struct caam_skcipher_alg *caam_alg =
 		container_of(alg, typeof(*caam_alg), skcipher);
 	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
+	u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
+	int ret = 0;
 
-	crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
-
 	ctx->enginectx.op.do_one_request = skcipher_do_one_req;
 
-	return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
-				false);
+	if (alg_aai == OP_ALG_AAI_XTS) {
+		const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
+		struct crypto_skcipher *fallback;
+
+		fallback = crypto_alloc_skcipher(tfm_name, 0,
+						 CRYPTO_ALG_NEED_FALLBACK);
+		if (IS_ERR(fallback)) {
+			dev_err(ctx->jrdev, "Failed to allocate %s fallback: %ld\n",
+				tfm_name, PTR_ERR(fallback));
+			return PTR_ERR(fallback);
+		}
+
+		ctx->fallback = fallback;
+		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx) +
+					    crypto_skcipher_reqsize(fallback));
+	} else {
+		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
+	}
+
+	ret = caam_init_common(ctx, &caam_alg->caam, false);
+	if (ret && ctx->fallback)
+		crypto_free_skcipher(ctx->fallback);
+
+	return ret;
 }
 
 static int caam_aead_init(struct crypto_aead *tfm)
@@ -3378,7 +3432,11 @@ static void caam_exit_common(struct caam_ctx *ctx)
 
 static void caam_cra_exit(struct crypto_skcipher *tfm)
 {
-	caam_exit_common(crypto_skcipher_ctx(tfm));
+	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	if (ctx->fallback)
+		crypto_free_skcipher(ctx->fallback);
+	caam_exit_common(ctx);
 }
 
 static void caam_aead_exit(struct crypto_aead *tfm)
@@ -3412,8 +3470,8 @@ static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
 	alg->base.cra_module = THIS_MODULE;
 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
-	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
-			      CRYPTO_ALG_KERN_DRIVER_ONLY;
+	alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
+			      CRYPTO_ALG_KERN_DRIVER_ONLY);
 
 	alg->init = caam_cra_init;
 	alg->exit = caam_cra_exit;