Commit 44c10a83 authored by Ard Biesheuvel, committed by Herbert Xu

crypto: sahara - switch to skcipher API

Commit 7a7ffe65 ("crypto: skcipher - Add top-level skcipher interface")
dated 20 August 2015 introduced the new skcipher API, which is supposed to
replace both blkcipher and ablkcipher. While all consumers of the API have
been converted long ago, some producers of the ablkcipher interface remain, forcing
us to keep the ablkcipher support routines alive, along with the matching
code to expose [a]blkciphers via the skcipher API.

So switch this driver to the skcipher API, allowing us to finally drop the
ablkcipher code in the near future.
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent c2609391
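
For reference, the general shape of the skcipher producer pattern this conversion targets is sketched below. This is a minimal illustration only, not code from this commit: all my_drv_* names are hypothetical placeholders, while the skcipher/crypto API calls and the req->cryptlen / req->iv request fields are the real kernel interfaces the driver is moved onto.

/*
 * Minimal sketch of an skcipher-based driver registration.
 * All my_drv_* names are hypothetical placeholders, not part of sahara.c.
 */
#include <linux/module.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

struct my_drv_reqctx {
	unsigned long mode;		/* per-request state */
};

static int my_drv_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	/* a real driver would stash the key in crypto_skcipher_ctx(tfm) */
	return keylen == AES_KEYSIZE_128 ? 0 : -EINVAL;
}

static int my_drv_encrypt(struct skcipher_request *req)
{
	struct my_drv_reqctx *rctx = skcipher_request_ctx(req);

	/* skcipher requests carry cryptlen/iv instead of nbytes/info */
	pr_debug("encrypt %u bytes, iv=%p\n", req->cryptlen, req->iv);
	rctx->mode = 0;
	return 0;			/* a real driver would queue the request here */
}

static int my_drv_init_tfm(struct crypto_skcipher *tfm)
{
	/* replaces setting tfm->crt_ablkcipher.reqsize in ->cra_init */
	crypto_skcipher_set_reqsize(tfm, sizeof(struct my_drv_reqctx));
	return 0;
}

static struct skcipher_alg my_drv_alg = {
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "my-drv-ecb-aes",
	.base.cra_priority	= 100,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= my_drv_setkey,
	.encrypt		= my_drv_encrypt,
	.decrypt		= my_drv_encrypt,	/* placeholder */
	.init			= my_drv_init_tfm,
};

static int __init my_drv_mod_init(void)
{
	return crypto_register_skcipher(&my_drv_alg);
}

static void __exit my_drv_mod_exit(void)
{
	crypto_unregister_skcipher(&my_drv_alg);
}

module_init(my_drv_mod_init);
module_exit(my_drv_mod_exit);
MODULE_LICENSE("GPL");

The diff below follows exactly this pattern: request fields are renamed (nbytes becomes cryptlen, info becomes iv), the per-request context size is set via crypto_skcipher_set_reqsize() in ->init, and the crypto_alg array becomes a skcipher_alg array registered with crypto_register_skcipher().
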
@@ -547,7 +547,7 @@ static int sahara_hw_descriptor_create(struct sahara_dev *dev)
 	return -EINVAL;
 }
 
-static int sahara_aes_process(struct ablkcipher_request *req)
+static int sahara_aes_process(struct skcipher_request *req)
 {
 	struct sahara_dev *dev = dev_ptr;
 	struct sahara_ctx *ctx;
@@ -558,20 +558,20 @@ static int sahara_aes_process(struct ablkcipher_request *req)
 	/* Request is ready to be dispatched by the device */
 	dev_dbg(dev->device,
 		"dispatch request (nbytes=%d, src=%p, dst=%p)\n",
-		req->nbytes, req->src, req->dst);
+		req->cryptlen, req->src, req->dst);
 
 	/* assign new request to device */
-	dev->total = req->nbytes;
+	dev->total = req->cryptlen;
 	dev->in_sg = req->src;
 	dev->out_sg = req->dst;
 
-	rctx = ablkcipher_request_ctx(req);
-	ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));
+	rctx = skcipher_request_ctx(req);
+	ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
 	rctx->mode &= FLAGS_MODE_MASK;
 	dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode;
 
-	if ((dev->flags & FLAGS_CBC) && req->info)
-		memcpy(dev->iv_base, req->info, AES_KEYSIZE_128);
+	if ((dev->flags & FLAGS_CBC) && req->iv)
+		memcpy(dev->iv_base, req->iv, AES_KEYSIZE_128);
 
 	/* assign new context to device */
 	dev->ctx = ctx;
@@ -597,10 +597,10 @@ static int sahara_aes_process(struct ablkcipher_request *req)
 	return 0;
 }
 
-static int sahara_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
+static int sahara_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			     unsigned int keylen)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(tfm);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int ret;
 
 	ctx->keylen = keylen;
@@ -630,16 +630,16 @@ static int sahara_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
 	return ret;
 }
 
-static int sahara_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
+static int sahara_aes_crypt(struct skcipher_request *req, unsigned long mode)
 {
-	struct sahara_aes_reqctx *rctx = ablkcipher_request_ctx(req);
+	struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);
 	struct sahara_dev *dev = dev_ptr;
 	int err = 0;
 
 	dev_dbg(dev->device, "nbytes: %d, enc: %d, cbc: %d\n",
-		req->nbytes, !!(mode & FLAGS_ENCRYPT), !!(mode & FLAGS_CBC));
+		req->cryptlen, !!(mode & FLAGS_ENCRYPT), !!(mode & FLAGS_CBC));
 
-	if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) {
+	if (!IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE)) {
 		dev_err(dev->device,
 			"request size is not exact amount of AES blocks\n");
 		return -EINVAL;
@@ -648,7 +648,7 @@ static int sahara_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
 	rctx->mode = mode;
 
 	mutex_lock(&dev->queue_mutex);
-	err = ablkcipher_enqueue_request(&dev->queue, req);
+	err = crypto_enqueue_request(&dev->queue, &req->base);
 	mutex_unlock(&dev->queue_mutex);
 
 	wake_up_process(dev->kthread);
@@ -656,10 +656,10 @@ static int sahara_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
 	return err;
 }
 
-static int sahara_aes_ecb_encrypt(struct ablkcipher_request *req)
+static int sahara_aes_ecb_encrypt(struct skcipher_request *req)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
-		crypto_ablkcipher_reqtfm(req));
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 	int err;
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
@@ -669,7 +669,7 @@ static int sahara_aes_ecb_encrypt(struct ablkcipher_request *req)
 		skcipher_request_set_callback(subreq, req->base.flags,
 					      NULL, NULL);
 		skcipher_request_set_crypt(subreq, req->src, req->dst,
-					   req->nbytes, req->info);
+					   req->cryptlen, req->iv);
 		err = crypto_skcipher_encrypt(subreq);
 		skcipher_request_zero(subreq);
 		return err;
@@ -678,10 +678,10 @@ static int sahara_aes_ecb_encrypt(struct ablkcipher_request *req)
 	return sahara_aes_crypt(req, FLAGS_ENCRYPT);
 }
 
-static int sahara_aes_ecb_decrypt(struct ablkcipher_request *req)
+static int sahara_aes_ecb_decrypt(struct skcipher_request *req)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
-		crypto_ablkcipher_reqtfm(req));
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 	int err;
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
@@ -691,7 +691,7 @@ static int sahara_aes_ecb_decrypt(struct ablkcipher_request *req)
 		skcipher_request_set_callback(subreq, req->base.flags,
 					      NULL, NULL);
 		skcipher_request_set_crypt(subreq, req->src, req->dst,
-					   req->nbytes, req->info);
+					   req->cryptlen, req->iv);
 		err = crypto_skcipher_decrypt(subreq);
 		skcipher_request_zero(subreq);
 		return err;
@@ -700,10 +700,10 @@ static int sahara_aes_ecb_decrypt(struct ablkcipher_request *req)
 	return sahara_aes_crypt(req, 0);
 }
 
-static int sahara_aes_cbc_encrypt(struct ablkcipher_request *req)
+static int sahara_aes_cbc_encrypt(struct skcipher_request *req)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
-		crypto_ablkcipher_reqtfm(req));
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 	int err;
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
@@ -713,7 +713,7 @@ static int sahara_aes_cbc_encrypt(struct ablkcipher_request *req)
 		skcipher_request_set_callback(subreq, req->base.flags,
 					      NULL, NULL);
 		skcipher_request_set_crypt(subreq, req->src, req->dst,
-					   req->nbytes, req->info);
+					   req->cryptlen, req->iv);
 		err = crypto_skcipher_encrypt(subreq);
 		skcipher_request_zero(subreq);
 		return err;
@@ -722,10 +722,10 @@ static int sahara_aes_cbc_encrypt(struct ablkcipher_request *req)
 	return sahara_aes_crypt(req, FLAGS_ENCRYPT | FLAGS_CBC);
 }
 
-static int sahara_aes_cbc_decrypt(struct ablkcipher_request *req)
+static int sahara_aes_cbc_decrypt(struct skcipher_request *req)
 {
-	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
-		crypto_ablkcipher_reqtfm(req));
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(
+		crypto_skcipher_reqtfm(req));
 	int err;
 
 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
@@ -735,7 +735,7 @@ static int sahara_aes_cbc_decrypt(struct ablkcipher_request *req)
 		skcipher_request_set_callback(subreq, req->base.flags,
 					      NULL, NULL);
 		skcipher_request_set_crypt(subreq, req->src, req->dst,
-					   req->nbytes, req->info);
+					   req->cryptlen, req->iv);
 		err = crypto_skcipher_decrypt(subreq);
 		skcipher_request_zero(subreq);
 		return err;
@@ -744,10 +744,10 @@ static int sahara_aes_cbc_decrypt(struct ablkcipher_request *req)
 	return sahara_aes_crypt(req, FLAGS_CBC);
 }
 
-static int sahara_aes_cra_init(struct crypto_tfm *tfm)
+static int sahara_aes_init_tfm(struct crypto_skcipher *tfm)
 {
-	const char *name = crypto_tfm_alg_name(tfm);
-	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);
+	const char *name = crypto_tfm_alg_name(&tfm->base);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	ctx->fallback = crypto_alloc_sync_skcipher(name, 0,
 					     CRYPTO_ALG_NEED_FALLBACK);
@@ -756,14 +756,14 @@ static int sahara_aes_cra_init(struct crypto_tfm *tfm)
 		return PTR_ERR(ctx->fallback);
 	}
 
-	tfm->crt_ablkcipher.reqsize = sizeof(struct sahara_aes_reqctx);
+	crypto_skcipher_set_reqsize(tfm, sizeof(struct sahara_aes_reqctx));
 
 	return 0;
 }
 
-static void sahara_aes_cra_exit(struct crypto_tfm *tfm)
+static void sahara_aes_exit_tfm(struct crypto_skcipher *tfm)
 {
-	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct sahara_ctx *ctx = crypto_skcipher_ctx(tfm);
 
 	crypto_free_sync_skcipher(ctx->fallback);
 }
@@ -1071,8 +1071,8 @@ static int sahara_queue_manage(void *data)
 
 			ret = sahara_sha_process(req);
 		} else {
-			struct ablkcipher_request *req =
-				ablkcipher_request_cast(async_req);
+			struct skcipher_request *req =
+				skcipher_request_cast(async_req);
 
 			ret = sahara_aes_process(req);
 		}
@@ -1189,48 +1189,42 @@ static int sahara_sha_cra_init(struct crypto_tfm *tfm)
 	return 0;
 }
 
-static struct crypto_alg aes_algs[] = {
+static struct skcipher_alg aes_algs[] = {
 {
-	.cra_name		= "ecb(aes)",
-	.cra_driver_name	= "sahara-ecb-aes",
-	.cra_priority		= 300,
-	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
-				  CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct sahara_ctx),
-	.cra_alignmask		= 0x0,
-	.cra_type		= &crypto_ablkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= sahara_aes_cra_init,
-	.cra_exit		= sahara_aes_cra_exit,
-	.cra_u.ablkcipher = {
-		.min_keysize	= AES_MIN_KEY_SIZE ,
-		.max_keysize	= AES_MAX_KEY_SIZE,
-		.setkey		= sahara_aes_setkey,
-		.encrypt	= sahara_aes_ecb_encrypt,
-		.decrypt	= sahara_aes_ecb_decrypt,
-	}
+	.base.cra_name		= "ecb(aes)",
+	.base.cra_driver_name	= "sahara-ecb-aes",
+	.base.cra_priority	= 300,
+	.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= AES_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct sahara_ctx),
+	.base.cra_alignmask	= 0x0,
+	.base.cra_module	= THIS_MODULE,
+
+	.init			= sahara_aes_init_tfm,
+	.exit			= sahara_aes_exit_tfm,
+	.min_keysize		= AES_MIN_KEY_SIZE ,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.setkey			= sahara_aes_setkey,
+	.encrypt		= sahara_aes_ecb_encrypt,
+	.decrypt		= sahara_aes_ecb_decrypt,
 }, {
-	.cra_name		= "cbc(aes)",
-	.cra_driver_name	= "sahara-cbc-aes",
-	.cra_priority		= 300,
-	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
-				  CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
-	.cra_blocksize		= AES_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct sahara_ctx),
-	.cra_alignmask		= 0x0,
-	.cra_type		= &crypto_ablkcipher_type,
-	.cra_module		= THIS_MODULE,
-	.cra_init		= sahara_aes_cra_init,
-	.cra_exit		= sahara_aes_cra_exit,
-	.cra_u.ablkcipher = {
-		.min_keysize	= AES_MIN_KEY_SIZE ,
-		.max_keysize	= AES_MAX_KEY_SIZE,
-		.ivsize		= AES_BLOCK_SIZE,
-		.setkey		= sahara_aes_setkey,
-		.encrypt	= sahara_aes_cbc_encrypt,
-		.decrypt	= sahara_aes_cbc_decrypt,
-	}
+	.base.cra_name		= "cbc(aes)",
+	.base.cra_driver_name	= "sahara-cbc-aes",
+	.base.cra_priority	= 300,
+	.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
+	.base.cra_blocksize	= AES_BLOCK_SIZE,
+	.base.cra_ctxsize	= sizeof(struct sahara_ctx),
+	.base.cra_alignmask	= 0x0,
+	.base.cra_module	= THIS_MODULE,
+
+	.init			= sahara_aes_init_tfm,
+	.exit			= sahara_aes_exit_tfm,
+	.min_keysize		= AES_MIN_KEY_SIZE ,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.ivsize			= AES_BLOCK_SIZE,
+	.setkey			= sahara_aes_setkey,
+	.encrypt		= sahara_aes_cbc_encrypt,
+	.decrypt		= sahara_aes_cbc_decrypt,
 }
 };
 
@@ -1318,7 +1312,7 @@ static int sahara_register_algs(struct sahara_dev *dev)
 	unsigned int i, j, k, l;
 
 	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
-		err = crypto_register_alg(&aes_algs[i]);
+		err = crypto_register_skcipher(&aes_algs[i]);
 		if (err)
 			goto err_aes_algs;
 	}
@@ -1348,7 +1342,7 @@ static int sahara_register_algs(struct sahara_dev *dev)
 
 err_aes_algs:
 	for (j = 0; j < i; j++)
-		crypto_unregister_alg(&aes_algs[j]);
+		crypto_unregister_skcipher(&aes_algs[j]);
 
 	return err;
 }
@@ -1358,7 +1352,7 @@ static void sahara_unregister_algs(struct sahara_dev *dev)
 	unsigned int i;
 
 	for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
-		crypto_unregister_alg(&aes_algs[i]);
+		crypto_unregister_skcipher(&aes_algs[i]);
 
 	for (i = 0; i < ARRAY_SIZE(sha_v3_algs); i++)
 		crypto_unregister_ahash(&sha_v3_algs[i]);