Commit 36e2d7cf authored by Andrei Botila, committed by Herbert Xu

crypto: caam/qi2 - add fallback for XTS with more than 8B IV

A hardware limitation exists for CAAM until Era 9 which restricts
the accelerator to IVs of only 8 bytes. On CAAMs with a lower era,
a software fallback is necessary to process 16-byte IVs.
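
The whole decision comes down to the content of the 16-byte XTS IV: hardware before Era 9 only consumes its low 8 bytes, so a request can stay on the accelerator only while the upper 8 bytes are all zero. A minimal sketch of that test, restating the xts_skcipher_ivsize() helper added below (the function name here is illustrative only, not part of the patch):

	/* True when the upper half of the 16-byte XTS IV is nonzero,
	 * i.e. the sector index no longer fits in the 8 IV bytes that
	 * pre-Era-9 CAAM supports and the software fallback is needed.
	 */
	static bool xts_iv_exceeds_8_bytes(struct skcipher_request *req)
	{
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
		unsigned int ivsize = crypto_skcipher_ivsize(tfm); /* 16 for xts(aes) */

		return get_unaligned((u64 *)(req->iv + ivsize / 2)) != 0;
	}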

Fixes: 226853ac ("crypto: caam/qi2 - add skcipher algorithms")
Cc: <stable@vger.kernel.org> # v4.20+
Signed-off-by: Andrei Botila <andrei.botila@nxp.com>
Reviewed-by: Horia Geantă <horia.geanta@nxp.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 83e8aa91
--- a/drivers/crypto/caam/Kconfig
+++ b/drivers/crypto/caam/Kconfig
@@ -167,6 +167,7 @@ config CRYPTO_DEV_FSL_DPAA2_CAAM
 	select CRYPTO_AEAD
 	select CRYPTO_HASH
 	select CRYPTO_DES
+	select CRYPTO_XTS
 	help
 	  CAAM driver for QorIQ Data Path Acceleration Architecture 2.
 	  It handles DPSECI DPAA2 objects that sit on the Management Complex
--- a/drivers/crypto/caam/caamalg_qi2.c
+++ b/drivers/crypto/caam/caamalg_qi2.c
@@ -19,6 +19,7 @@
 #include <linux/fsl/mc.h>
 #include <soc/fsl/dpaa2-io.h>
 #include <soc/fsl/dpaa2-fd.h>
+#include <asm/unaligned.h>
 
 #define CAAM_CRA_PRIORITY	2000
@@ -80,6 +81,7 @@ struct caam_ctx {
 	struct alginfo adata;
 	struct alginfo cdata;
 	unsigned int authsize;
+	struct crypto_skcipher *fallback;
 };
 
 static void *dpaa2_caam_iova_to_virt(struct dpaa2_caam_priv *priv,
@@ -1056,12 +1058,17 @@ static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
 	struct device *dev = ctx->dev;
 	struct caam_flc *flc;
 	u32 *desc;
+	int err;
 
 	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
 		dev_dbg(dev, "key size mismatch\n");
 		return -EINVAL;
 	}
 
+	err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
+	if (err)
+		return err;
+
 	ctx->cdata.keylen = keylen;
 	ctx->cdata.key_virt = key;
 	ctx->cdata.key_inline = true;
@@ -1443,6 +1450,14 @@ static void skcipher_decrypt_done(void *cbk_ctx, u32 status)
 	skcipher_request_complete(req, ecode);
 }
 
+static inline bool xts_skcipher_ivsize(struct skcipher_request *req)
+{
+	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
+	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
+
+	return !!get_unaligned((u64 *)(req->iv + (ivsize / 2)));
+}
+
 static int skcipher_encrypt(struct skcipher_request *req)
 {
 	struct skcipher_edesc *edesc;
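
Despite its name, xts_skcipher_ivsize() above does not return a size: it reads the upper half of the IV as an unaligned u64 and reports whether any of those bytes are set. A hypothetical illustration of the two paths it selects:

	u8 iv_lo[16] = { [0] = 1 };	/* upper 8 bytes zero */
	u8 iv_hi[16] = { [8] = 1 };	/* upper 8 bytes in use */

	/* With req->iv = iv_lo the helper returns false and the request
	 * stays on the CAAM hardware path; with iv_hi it returns true
	 * and skcipher_encrypt()/skcipher_decrypt() below divert the
	 * request to the fallback tfm.
	 */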
@@ -1454,6 +1469,18 @@ static int skcipher_encrypt(struct skcipher_request *req)
 	if (!req->cryptlen)
 		return 0;
 
+	if (ctx->fallback && xts_skcipher_ivsize(req)) {
+		skcipher_request_set_tfm(&caam_req->fallback_req, ctx->fallback);
+		skcipher_request_set_callback(&caam_req->fallback_req,
+					      req->base.flags,
+					      req->base.complete,
+					      req->base.data);
+		skcipher_request_set_crypt(&caam_req->fallback_req, req->src,
+					   req->dst, req->cryptlen, req->iv);
+
+		return crypto_skcipher_encrypt(&caam_req->fallback_req);
+	}
+
 	/* allocate extended descriptor */
 	edesc = skcipher_edesc_alloc(req);
 	if (IS_ERR(edesc))
@@ -1484,6 +1511,19 @@ static int skcipher_decrypt(struct skcipher_request *req)
 	if (!req->cryptlen)
 		return 0;
 
+	if (ctx->fallback && xts_skcipher_ivsize(req)) {
+		skcipher_request_set_tfm(&caam_req->fallback_req, ctx->fallback);
+		skcipher_request_set_callback(&caam_req->fallback_req,
+					      req->base.flags,
+					      req->base.complete,
+					      req->base.data);
+		skcipher_request_set_crypt(&caam_req->fallback_req, req->src,
+					   req->dst, req->cryptlen, req->iv);
+
+		return crypto_skcipher_decrypt(&caam_req->fallback_req);
+	}
+
 	/* allocate extended descriptor */
 	edesc = skcipher_edesc_alloc(req);
 	if (IS_ERR(edesc))
@@ -1537,9 +1577,34 @@ static int caam_cra_init_skcipher(struct crypto_skcipher *tfm)
 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
 	struct caam_skcipher_alg *caam_alg =
 		container_of(alg, typeof(*caam_alg), skcipher);
+	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
+	u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
+	int ret = 0;
 
-	crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_request));
-	return caam_cra_init(crypto_skcipher_ctx(tfm), &caam_alg->caam, false);
+	if (alg_aai == OP_ALG_AAI_XTS) {
+		const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
+		struct crypto_skcipher *fallback;
+
+		fallback = crypto_alloc_skcipher(tfm_name, 0,
+						 CRYPTO_ALG_NEED_FALLBACK);
+		if (IS_ERR(fallback)) {
+			dev_err(ctx->dev, "Failed to allocate %s fallback: %ld\n",
+				tfm_name, PTR_ERR(fallback));
+			return PTR_ERR(fallback);
+		}
+
+		ctx->fallback = fallback;
+		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_request) +
+					    crypto_skcipher_reqsize(fallback));
+	} else {
+		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_request));
+	}
+
+	ret = caam_cra_init(ctx, &caam_alg->caam, false);
+	if (ret && ctx->fallback)
+		crypto_free_skcipher(ctx->fallback);
+
+	return ret;
 }
 
 static int caam_cra_init_aead(struct crypto_aead *tfm)
@@ -1562,7 +1627,11 @@ static void caam_exit_common(struct caam_ctx *ctx)
 
 static void caam_cra_exit(struct crypto_skcipher *tfm)
 {
-	caam_exit_common(crypto_skcipher_ctx(tfm));
+	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	if (ctx->fallback)
+		crypto_free_skcipher(ctx->fallback);
+	caam_exit_common(ctx);
 }
 
 static void caam_cra_exit_aead(struct crypto_aead *tfm)
@@ -1665,6 +1734,7 @@ static struct caam_skcipher_alg driver_algs[] = {
 		.base = {
 			.cra_name = "xts(aes)",
 			.cra_driver_name = "xts-aes-caam-qi2",
+			.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
 			.cra_blocksize = AES_BLOCK_SIZE,
 		},
 		.setkey = xts_skcipher_setkey,
@@ -2912,8 +2982,8 @@ static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
 	alg->base.cra_module = THIS_MODULE;
 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
-	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
-			      CRYPTO_ALG_KERN_DRIVER_ONLY;
+	alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
+				CRYPTO_ALG_KERN_DRIVER_ONLY);
 
 	alg->init = caam_cra_init_skcipher;
 	alg->exit = caam_cra_exit;
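
The fallback branches in skcipher_encrypt() and skcipher_decrypt() are identical up to the final call, and neither allocates memory: the fallback_req they populate is preallocated inside the per-request struct caam_request (see the header change below). A condensed sketch of the shared pattern, with a hypothetical helper name not present in the patch:

	static int skcipher_do_fallback(struct skcipher_request *req, bool encrypt)
	{
		struct caam_request *caam_req = skcipher_request_ctx(req);
		struct caam_ctx *ctx =
			crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));

		/* mirror the caller's request onto the embedded sub-request */
		skcipher_request_set_tfm(&caam_req->fallback_req, ctx->fallback);
		skcipher_request_set_callback(&caam_req->fallback_req,
					      req->base.flags,
					      req->base.complete, req->base.data);
		skcipher_request_set_crypt(&caam_req->fallback_req, req->src,
					   req->dst, req->cryptlen, req->iv);

		return encrypt ? crypto_skcipher_encrypt(&caam_req->fallback_req) :
				 crypto_skcipher_decrypt(&caam_req->fallback_req);
	}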
--- a/drivers/crypto/caam/caamalg_qi2.h
+++ b/drivers/crypto/caam/caamalg_qi2.h
@@ -13,6 +13,7 @@
 #include <linux/netdevice.h>
 #include "dpseci.h"
 #include "desc_constr.h"
+#include <crypto/skcipher.h>
 
 #define DPAA2_CAAM_STORE_SIZE	16
 /* NAPI weight *must* be a multiple of the store size. */
@@ -186,6 +187,7 @@ struct caam_request {
 	void (*cbk)(void *ctx, u32 err);
 	void *ctx;
 	void *edesc;
+	struct skcipher_request fallback_req;
 };
 
 /**
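
Keeping fallback_req as the last member of struct caam_request is what makes the reqsize arithmetic in caam_cra_init_skcipher() above come out right: a sub-request's own context must sit directly behind its struct skcipher_request, so the driver reserves both regions with a single crypto_skcipher_set_reqsize() call. A layout sketch (an inference from the code, not spelled out in the patch):

	/*
	 * One request-context allocation, laid out as:
	 *
	 *   [ caam_request ... fallback_req ][ fallback's request context ]
	 *     sizeof(struct caam_request)     crypto_skcipher_reqsize(fallback)
	 */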