Commit 1eb7b403 authored by Ofer Heifetz, committed by Herbert Xu

crypto: inside-secure - per request invalidation

When an invalidation request is needed we currently override the
context's .send and .handle_result helpers. This is wrong: under high
load other requests may already be queued, and overriding the context
helpers makes them execute the wrong .send and .handle_result
functions.

This commit fixes this by adding a needs_inv flag in the request, used
to choose the action to perform when sending a request or handling its
result. The per-request flag is set whenever it is needed (i.e. whenever
the context's needs_inv flag is set); an illustrative sketch of this
dispatch pattern is shown after the commit header below.

Fixes: 1b44c5a6 ("crypto: inside-secure - add SafeXcel EIP197 crypto engine driver")
Signed-off-by: Ofer Heifetz <oferh@marvell.com>
[Antoine: commit message, and removed unrelated changes from the
original commit]
Signed-off-by: Antoine Tenart <antoine.tenart@free-electrons.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent bbc25bee
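For illustration, here is a minimal standalone C sketch of the dispatch pattern this change adopts. Everything below (struct request, dispatch_send, send_normal, send_inv) is a hypothetical stand-in, not driver code: the point is that a dispatcher installed once, consulting a per-request flag, cannot misroute requests that were already queued when an invalidation was requested, whereas swapping a shared per-context function pointer can.

/* Illustrative sketch only; hypothetical names, not the driver's API. */
#include <stdbool.h>
#include <stdio.h>

struct request {
        bool needs_inv;         /* per-request flag, as in this patch */
        const char *name;
};

static int send_normal(struct request *req)
{
        printf("%s: normal processing\n", req->name);
        return 0;
}

static int send_inv(struct request *req)
{
        printf("%s: cache invalidation\n", req->name);
        return 0;
}

/*
 * Installed once at init time (like ctx->base.send in this patch) and
 * never overridden: each request picks its own path via its own flag.
 */
static int dispatch_send(struct request *req)
{
        return req->needs_inv ? send_inv(req) : send_normal(req);
}

int main(void)
{
        struct request queued = { .needs_inv = false, .name = "queued" };
        struct request inv    = { .needs_inv = true,  .name = "inv" };

        /* Submission order no longer matters: each request keeps its path. */
        dispatch_send(&inv);
        dispatch_send(&queued);
        return 0;
}

Under the old scheme the shared ctx->base.send pointer was redirected to the invalidation helper, so a request already sitting in the ring queue would be sent, and its result handled, by the wrong function; the per-request needs_inv flag closes that window.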
--- a/drivers/crypto/inside-secure/safexcel_cipher.c
+++ b/drivers/crypto/inside-secure/safexcel_cipher.c
@@ -14,6 +14,7 @@
 #include <crypto/aes.h>
 #include <crypto/skcipher.h>
+#include <crypto/internal/skcipher.h>
 
 #include "safexcel.h"
@@ -33,6 +34,10 @@ struct safexcel_cipher_ctx {
 	unsigned int key_len;
 };
 
+struct safexcel_cipher_req {
+	bool needs_inv;
+};
+
 static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx,
 				  struct crypto_async_request *async,
 				  struct safexcel_command_desc *cdesc,
@@ -126,9 +131,9 @@ static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
 	return 0;
 }
 
-static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
-				  struct crypto_async_request *async,
-				  bool *should_complete, int *ret)
+static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
+				      struct crypto_async_request *async,
+				      bool *should_complete, int *ret)
 {
 	struct skcipher_request *req = skcipher_request_cast(async);
 	struct safexcel_result_desc *rdesc;
@@ -265,7 +270,6 @@ static int safexcel_aes_send(struct crypto_async_request *async,
 	spin_unlock_bh(&priv->ring[ring].egress_lock);
 
 	request->req = &req->base;
-	ctx->base.handle_result = safexcel_handle_result;
 
 	*commands = n_cdesc;
 	*results = n_rdesc;
@@ -341,8 +345,6 @@ static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 	ring = safexcel_select_ring(priv);
 	ctx->base.ring = ring;
-	ctx->base.needs_inv = false;
-	ctx->base.send = safexcel_aes_send;
 
 	spin_lock_bh(&priv->ring[ring].queue_lock);
 	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
@@ -359,6 +361,26 @@ static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 	return ndesc;
 }
 
+static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
+				  struct crypto_async_request *async,
+				  bool *should_complete, int *ret)
+{
+	struct skcipher_request *req = skcipher_request_cast(async);
+	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
+	int err;
+
+	if (sreq->needs_inv) {
+		sreq->needs_inv = false;
+		err = safexcel_handle_inv_result(priv, ring, async,
+						 should_complete, ret);
+	} else {
+		err = safexcel_handle_req_result(priv, ring, async,
+						 should_complete, ret);
+	}
+
+	return err;
+}
+
 static int safexcel_cipher_send_inv(struct crypto_async_request *async,
 				    int ring, struct safexcel_request *request,
 				    int *commands, int *results)
@@ -368,8 +390,6 @@ static int safexcel_cipher_send_inv(struct crypto_async_request *async,
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	int ret;
 
-	ctx->base.handle_result = safexcel_handle_inv_result;
-
 	ret = safexcel_invalidate_cache(async, &ctx->base, priv,
 					ctx->base.ctxr_dma, ring, request);
 	if (unlikely(ret))
@@ -381,11 +401,29 @@ static int safexcel_cipher_send_inv(struct crypto_async_request *async,
 	return 0;
 }
 
+static int safexcel_send(struct crypto_async_request *async,
+			 int ring, struct safexcel_request *request,
+			 int *commands, int *results)
+{
+	struct skcipher_request *req = skcipher_request_cast(async);
+	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
+	int ret;
+
+	if (sreq->needs_inv)
+		ret = safexcel_cipher_send_inv(async, ring, request,
+					       commands, results);
+	else
+		ret = safexcel_aes_send(async, ring, request,
+					commands, results);
+	return ret;
+}
+
 static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm)
 {
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	struct skcipher_request req;
+	struct safexcel_cipher_req *sreq = skcipher_request_ctx(&req);
 	struct safexcel_inv_result result = {};
 	int ring = ctx->base.ring;
@@ -399,7 +437,7 @@ static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm)
 	skcipher_request_set_tfm(&req, __crypto_skcipher_cast(tfm));
 	ctx = crypto_tfm_ctx(req.base.tfm);
 	ctx->base.exit_inv = true;
-	ctx->base.send = safexcel_cipher_send_inv;
+	sreq->needs_inv = true;
 
 	spin_lock_bh(&priv->ring[ring].queue_lock);
 	crypto_enqueue_request(&priv->ring[ring].queue, &req.base);
@@ -424,19 +462,21 @@ static int safexcel_aes(struct skcipher_request *req,
 			enum safexcel_cipher_direction dir, u32 mode)
 {
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
+	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	int ret, ring;
 
+	sreq->needs_inv = false;
 	ctx->direction = dir;
 	ctx->mode = mode;
 
 	if (ctx->base.ctxr) {
-		if (ctx->base.needs_inv)
-			ctx->base.send = safexcel_cipher_send_inv;
+		if (ctx->base.needs_inv) {
+			sreq->needs_inv = true;
+			ctx->base.needs_inv = false;
+		}
 	} else {
 		ctx->base.ring = safexcel_select_ring(priv);
-		ctx->base.send = safexcel_aes_send;
 		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
 						 EIP197_GFP_FLAGS(req->base),
 						 &ctx->base.ctxr_dma);
@@ -476,6 +516,11 @@ static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
 			     alg.skcipher.base);
 
 	ctx->priv = tmpl->priv;
+	ctx->base.send = safexcel_send;
+	ctx->base.handle_result = safexcel_handle_result;
+
+	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
+				    sizeof(struct safexcel_cipher_req));
 
 	return 0;
 }
--- a/drivers/crypto/inside-secure/safexcel_hash.c
+++ b/drivers/crypto/inside-secure/safexcel_hash.c
@@ -32,6 +32,7 @@ struct safexcel_ahash_req {
 	bool last_req;
 	bool finish;
 	bool hmac;
+	bool needs_inv;
 
 	u8 state_sz;    /* expected sate size, only set once */
 	u32 state[SHA256_DIGEST_SIZE / sizeof(u32)];
@@ -119,9 +120,9 @@ static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
 	}
 }
 
-static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
-				  struct crypto_async_request *async,
-				  bool *should_complete, int *ret)
+static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
+				      struct crypto_async_request *async,
+				      bool *should_complete, int *ret)
 {
 	struct safexcel_result_desc *rdesc;
 	struct ahash_request *areq = ahash_request_cast(async);
@@ -165,9 +166,9 @@ static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
 	return 1;
 }
 
-static int safexcel_ahash_send(struct crypto_async_request *async, int ring,
-			       struct safexcel_request *request, int *commands,
-			       int *results)
+static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
+				   struct safexcel_request *request,
+				   int *commands, int *results)
 {
 	struct ahash_request *areq = ahash_request_cast(async);
 	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
@@ -292,7 +293,6 @@ static int safexcel_ahash_send(struct crypto_async_request *async, int ring,
 	req->processed += len;
 
 	request->req = &areq->base;
-	ctx->base.handle_result = safexcel_handle_result;
 
 	*commands = n_cdesc;
 	*results = 1;
@@ -374,8 +374,6 @@ static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 	ring = safexcel_select_ring(priv);
 	ctx->base.ring = ring;
-	ctx->base.needs_inv = false;
-	ctx->base.send = safexcel_ahash_send;
 
 	spin_lock_bh(&priv->ring[ring].queue_lock);
 	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
@@ -392,6 +390,26 @@ static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 	return 1;
 }
 
+static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
+				  struct crypto_async_request *async,
+				  bool *should_complete, int *ret)
+{
+	struct ahash_request *areq = ahash_request_cast(async);
+	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+	int err;
+
+	if (req->needs_inv) {
+		req->needs_inv = false;
+		err = safexcel_handle_inv_result(priv, ring, async,
+						 should_complete, ret);
+	} else {
+		err = safexcel_handle_req_result(priv, ring, async,
+						 should_complete, ret);
+	}
+
+	return err;
+}
+
 static int safexcel_ahash_send_inv(struct crypto_async_request *async,
 				   int ring, struct safexcel_request *request,
 				   int *commands, int *results)
@@ -400,7 +418,6 @@ static int safexcel_ahash_send_inv(struct crypto_async_request *async,
 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 	int ret;
 
-	ctx->base.handle_result = safexcel_handle_inv_result;
 	ret = safexcel_invalidate_cache(async, &ctx->base, ctx->priv,
 					ctx->base.ctxr_dma, ring, request);
 	if (unlikely(ret))
@@ -412,11 +429,29 @@ static int safexcel_ahash_send_inv(struct crypto_async_request *async,
 	return 0;
 }
 
+static int safexcel_ahash_send(struct crypto_async_request *async,
+			       int ring, struct safexcel_request *request,
+			       int *commands, int *results)
+{
+	struct ahash_request *areq = ahash_request_cast(async);
+	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+	int ret;
+
+	if (req->needs_inv)
+		ret = safexcel_ahash_send_inv(async, ring, request,
+					      commands, results);
+	else
+		ret = safexcel_ahash_send_req(async, ring, request,
+					      commands, results);
+	return ret;
+}
+
 static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
 {
 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	struct ahash_request req;
+	struct safexcel_ahash_req *rctx = ahash_request_ctx(&req);
 	struct safexcel_inv_result result = {};
 	int ring = ctx->base.ring;
@@ -430,7 +465,7 @@ static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
 	ahash_request_set_tfm(&req, __crypto_ahash_cast(tfm));
 	ctx = crypto_tfm_ctx(req.base.tfm);
 	ctx->base.exit_inv = true;
-	ctx->base.send = safexcel_ahash_send_inv;
+	rctx->needs_inv = true;
 
 	spin_lock_bh(&priv->ring[ring].queue_lock);
 	crypto_enqueue_request(&priv->ring[ring].queue, &req.base);
@@ -481,14 +516,16 @@ static int safexcel_ahash_enqueue(struct ahash_request *areq)
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	int ret, ring;
 
-	ctx->base.send = safexcel_ahash_send;
+	req->needs_inv = false;
 
 	if (req->processed && ctx->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED)
 		ctx->base.needs_inv = safexcel_ahash_needs_inv_get(areq);
 
 	if (ctx->base.ctxr) {
-		if (ctx->base.needs_inv)
-			ctx->base.send = safexcel_ahash_send_inv;
+		if (ctx->base.needs_inv) {
+			ctx->base.needs_inv = false;
+			req->needs_inv = true;
+		}
 	} else {
 		ctx->base.ring = safexcel_select_ring(priv);
 		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
@@ -622,6 +659,8 @@ static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
 			     struct safexcel_alg_template, alg.ahash);
 
 	ctx->priv = tmpl->priv;
+	ctx->base.send = safexcel_ahash_send;
+	ctx->base.handle_result = safexcel_handle_result;
 
 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
 				 sizeof(struct safexcel_ahash_req));