Commit 5eedf315 authored by Ard Biesheuvel, committed by Herbert Xu

crypto: arm/aes-ce - provide a synchronous version of ctr(aes)

AES in CTR mode is used by modes such as GCM and CCM, which are often
used in contexts where only synchronous ciphers are permitted. So
provide a synchronous version of ctr(aes) based on the existing code.
This requires a non-SIMD fallback to deal with invocations occurring
from a context where SIMD instructions may not be used. We have a
helper for this now in the AES library, so wire that up.
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent fafb1dca
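
For context (not part of the commit): a kernel user that needs the synchronous implementation this patch registers would typically request it through the sync-skcipher API, which masks out asynchronous candidates and can therefore end up with "ctr-aes-ce-sync". The sketch below is illustrative only; the helper name example_ctr_encrypt and the flat in-place buffer interface are hypothetical, and error handling is abbreviated.

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/types.h>

/* Hypothetical helper: CTR-encrypt a flat buffer in place with ctr(aes). */
static int example_ctr_encrypt(const u8 *key, unsigned int keylen,
			       u8 iv[AES_BLOCK_SIZE], u8 *buf, unsigned int len)
{
	struct crypto_sync_skcipher *tfm;
	struct scatterlist sg;
	int err;

	/*
	 * Asking for a sync skcipher guarantees a non-ASYNC implementation,
	 * such as the ctr-aes-ce-sync algorithm added by this patch.
	 */
	tfm = crypto_alloc_sync_skcipher("ctr(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_sync_skcipher_setkey(tfm, key, keylen);
	if (!err) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(req, tfm);

		sg_init_one(&sg, buf, len);
		skcipher_request_set_sync_tfm(req, tfm);
		skcipher_request_set_callback(req, 0, NULL, NULL);
		skcipher_request_set_crypt(req, &sg, &sg, len, iv);
		err = crypto_skcipher_encrypt(req);
		skcipher_request_zero(req);
	}

	crypto_free_sync_skcipher(tfm);
	return err;
}
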
@@ -7,8 +7,10 @@
 #include <asm/hwcap.h>
 #include <asm/neon.h>
+#include <asm/simd.h>
 #include <asm/unaligned.h>
 #include <crypto/aes.h>
+#include <crypto/ctr.h>
 #include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
 #include <linux/cpufeature.h>
@@ -286,6 +288,29 @@ static int ctr_encrypt(struct skcipher_request *req)
 	return err;
 }
 
+static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
+{
+	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
+	unsigned long flags;
+
+	/*
+	 * Temporarily disable interrupts to avoid races where
+	 * cachelines are evicted when the CPU is interrupted
+	 * to do something else.
+	 */
+	local_irq_save(flags);
+	aes_encrypt(ctx, dst, src);
+	local_irq_restore(flags);
+}
+
+static int ctr_encrypt_sync(struct skcipher_request *req)
+{
+	if (!crypto_simd_usable())
+		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);
+
+	return ctr_encrypt(req);
+}
+
 static int xts_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
@@ -375,6 +400,21 @@ static struct skcipher_alg aes_algs[] = { {
 	.setkey			= ce_aes_setkey,
 	.encrypt		= ctr_encrypt,
 	.decrypt		= ctr_encrypt,
+}, {
+	.base.cra_name		= "ctr(aes)",
+	.base.cra_driver_name	= "ctr-aes-ce-sync",
+	.base.cra_priority	= 300 - 1,
+	.base.cra_blocksize	= 1,
+	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
+	.base.cra_module	= THIS_MODULE,
+
+	.min_keysize		= AES_MIN_KEY_SIZE,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.ivsize			= AES_BLOCK_SIZE,
+	.chunksize		= AES_BLOCK_SIZE,
+	.setkey			= ce_aes_setkey,
+	.encrypt		= ctr_encrypt_sync,
+	.decrypt		= ctr_encrypt_sync,
 }, {
 	.base.cra_name		= "__xts(aes)",
 	.base.cra_driver_name	= "__xts-aes-ce",
@@ -418,6 +458,9 @@ static int __init aes_init(void)
 		return err;
 
 	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
+		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
+			continue;
+
 		algname = aes_algs[i].base.cra_name + 2;
 		drvname = aes_algs[i].base.cra_driver_name + 2;
 		basename = aes_algs[i].base.cra_driver_name;
...
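
For illustration (not part of the commit): on the fallback path, ctr_encrypt_sync() hands the one-block primitive ctr_encrypt_one() to crypto_ctr_encrypt_walk(), which walks the request and, per block, encrypts the counter with the scalar AES library, XORs the resulting keystream into the data, and increments the counter. The sketch below shows that per-block pattern over a flat buffer under simplified assumptions; the name toy_ctr_crypt and the flat-buffer interface are hypothetical, whereas the real helper operates on the skcipher walk and scatterlists.

#include <crypto/aes.h>
#include <crypto/algapi.h>	/* crypto_inc(), crypto_xor() */
#include <linux/kernel.h>	/* min_t() */
#include <linux/string.h>

/* Hypothetical, simplified CTR over a flat buffer; same op encrypts and decrypts. */
static void toy_ctr_crypt(const struct crypto_aes_ctx *ctx, u8 *dst,
			  const u8 *src, unsigned int len,
			  u8 ctrblk[AES_BLOCK_SIZE])
{
	u8 keystream[AES_BLOCK_SIZE];

	while (len > 0) {
		unsigned int n = min_t(unsigned int, len, AES_BLOCK_SIZE);

		/* keystream = AES_K(counter), using the scalar AES library */
		aes_encrypt(ctx, keystream, ctrblk);
		crypto_inc(ctrblk, AES_BLOCK_SIZE);

		/* output = input XOR keystream, truncated for the final block */
		memcpy(dst, src, n);
		crypto_xor(dst, keystream, n);

		src += n;
		dst += n;
		len -= n;
	}

	memzero_explicit(keystream, sizeof(keystream));
}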