Commit 9e345711 authored by Ard Biesheuvel, committed by Herbert Xu

crypto: arm64/gcm - add RFC4106 support

Add support for RFC4106 ESP encapsulation to the accelerated GCM
implementation. This results in a ~10% speedup for IPsec frames of
typical size (~1420 bytes) on Cortex-A53.
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 596f674d
...@@ -9,6 +9,7 @@ ...@@ -9,6 +9,7 @@
#include <asm/simd.h> #include <asm/simd.h>
#include <asm/unaligned.h> #include <asm/unaligned.h>
#include <crypto/aes.h> #include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/algapi.h> #include <crypto/algapi.h>
#include <crypto/b128ops.h> #include <crypto/b128ops.h>
#include <crypto/gf128mul.h> #include <crypto/gf128mul.h>
...@@ -28,7 +29,8 @@ MODULE_ALIAS_CRYPTO("ghash"); ...@@ -28,7 +29,8 @@ MODULE_ALIAS_CRYPTO("ghash");
#define GHASH_BLOCK_SIZE 16 #define GHASH_BLOCK_SIZE 16
#define GHASH_DIGEST_SIZE 16 #define GHASH_DIGEST_SIZE 16
#define GCM_IV_SIZE 12
#define RFC4106_NONCE_SIZE 4
struct ghash_key { struct ghash_key {
be128 k; be128 k;
...@@ -43,6 +45,7 @@ struct ghash_desc_ctx { ...@@ -43,6 +45,7 @@ struct ghash_desc_ctx {
struct gcm_aes_ctx { struct gcm_aes_ctx {
struct crypto_aes_ctx aes_key; struct crypto_aes_ctx aes_key;
u8 nonce[RFC4106_NONCE_SIZE];
struct ghash_key ghash_key; struct ghash_key ghash_key;
}; };
...@@ -226,8 +229,8 @@ static int num_rounds(struct crypto_aes_ctx *ctx) ...@@ -226,8 +229,8 @@ static int num_rounds(struct crypto_aes_ctx *ctx)
return 6 + ctx->key_length / 4; return 6 + ctx->key_length / 4;
} }
static int gcm_setkey(struct crypto_aead *tfm, const u8 *inkey, static int gcm_aes_setkey(struct crypto_aead *tfm, const u8 *inkey,
unsigned int keylen) unsigned int keylen)
{ {
struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm); struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
u8 key[GHASH_BLOCK_SIZE]; u8 key[GHASH_BLOCK_SIZE];
...@@ -258,17 +261,9 @@ static int gcm_setkey(struct crypto_aead *tfm, const u8 *inkey, ...@@ -258,17 +261,9 @@ static int gcm_setkey(struct crypto_aead *tfm, const u8 *inkey,
return 0; return 0;
} }
/*
 * gcm_aes_setauthsize() - validate the requested GCM authentication tag size
 * @tfm:      the AEAD transform (unused here)
 * @authsize: tag length in bytes requested by the caller
 *
 * Delegates to the generic gcm helper so the set of legal ICV lengths
 * stays in one place instead of an open-coded switch.
 */
static int gcm_aes_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[], static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
...@@ -302,13 +297,12 @@ static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[], ...@@ -302,13 +297,12 @@ static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
} }
} }
static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[]) static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[], u32 len)
{ {
struct crypto_aead *aead = crypto_aead_reqtfm(req); struct crypto_aead *aead = crypto_aead_reqtfm(req);
struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead); struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
u8 buf[GHASH_BLOCK_SIZE]; u8 buf[GHASH_BLOCK_SIZE];
struct scatter_walk walk; struct scatter_walk walk;
u32 len = req->assoclen;
int buf_count = 0; int buf_count = 0;
scatterwalk_start(&walk, req->src); scatterwalk_start(&walk, req->src);
...@@ -338,27 +332,25 @@ static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[]) ...@@ -338,27 +332,25 @@ static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[])
} }
} }
static int gcm_encrypt(struct aead_request *req) static int gcm_encrypt(struct aead_request *req, char *iv, int assoclen)
{ {
struct crypto_aead *aead = crypto_aead_reqtfm(req); struct crypto_aead *aead = crypto_aead_reqtfm(req);
struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead); struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
int nrounds = num_rounds(&ctx->aes_key); int nrounds = num_rounds(&ctx->aes_key);
struct skcipher_walk walk; struct skcipher_walk walk;
u8 buf[AES_BLOCK_SIZE]; u8 buf[AES_BLOCK_SIZE];
u8 iv[AES_BLOCK_SIZE];
u64 dg[2] = {}; u64 dg[2] = {};
be128 lengths; be128 lengths;
u8 *tag; u8 *tag;
int err; int err;
lengths.a = cpu_to_be64(req->assoclen * 8); lengths.a = cpu_to_be64(assoclen * 8);
lengths.b = cpu_to_be64(req->cryptlen * 8); lengths.b = cpu_to_be64(req->cryptlen * 8);
if (req->assoclen) if (assoclen)
gcm_calculate_auth_mac(req, dg); gcm_calculate_auth_mac(req, dg, assoclen);
memcpy(iv, req->iv, GCM_IV_SIZE); put_unaligned_be32(2, iv + GCM_AES_IV_SIZE);
put_unaligned_be32(2, iv + GCM_IV_SIZE);
err = skcipher_walk_aead_encrypt(&walk, req, false); err = skcipher_walk_aead_encrypt(&walk, req, false);
...@@ -403,7 +395,7 @@ static int gcm_encrypt(struct aead_request *req) ...@@ -403,7 +395,7 @@ static int gcm_encrypt(struct aead_request *req)
return 0; return 0;
} }
static int gcm_decrypt(struct aead_request *req) static int gcm_decrypt(struct aead_request *req, char *iv, int assoclen)
{ {
struct crypto_aead *aead = crypto_aead_reqtfm(req); struct crypto_aead *aead = crypto_aead_reqtfm(req);
struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead); struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
...@@ -412,21 +404,19 @@ static int gcm_decrypt(struct aead_request *req) ...@@ -412,21 +404,19 @@ static int gcm_decrypt(struct aead_request *req)
struct skcipher_walk walk; struct skcipher_walk walk;
u8 otag[AES_BLOCK_SIZE]; u8 otag[AES_BLOCK_SIZE];
u8 buf[AES_BLOCK_SIZE]; u8 buf[AES_BLOCK_SIZE];
u8 iv[AES_BLOCK_SIZE];
u64 dg[2] = {}; u64 dg[2] = {};
be128 lengths; be128 lengths;
u8 *tag; u8 *tag;
int ret; int ret;
int err; int err;
lengths.a = cpu_to_be64(req->assoclen * 8); lengths.a = cpu_to_be64(assoclen * 8);
lengths.b = cpu_to_be64((req->cryptlen - authsize) * 8); lengths.b = cpu_to_be64((req->cryptlen - authsize) * 8);
if (req->assoclen) if (assoclen)
gcm_calculate_auth_mac(req, dg); gcm_calculate_auth_mac(req, dg, assoclen);
memcpy(iv, req->iv, GCM_IV_SIZE); put_unaligned_be32(2, iv + GCM_AES_IV_SIZE);
put_unaligned_be32(2, iv + GCM_IV_SIZE);
scatterwalk_map_and_copy(otag, req->src, scatterwalk_map_and_copy(otag, req->src,
req->assoclen + req->cryptlen - authsize, req->assoclen + req->cryptlen - authsize,
...@@ -471,14 +461,76 @@ static int gcm_decrypt(struct aead_request *req) ...@@ -471,14 +461,76 @@ static int gcm_decrypt(struct aead_request *req)
return ret ? -EBADMSG : 0; return ret ? -EBADMSG : 0;
} }
static struct aead_alg gcm_aes_alg = { static int gcm_aes_encrypt(struct aead_request *req)
.ivsize = GCM_IV_SIZE, {
u8 iv[AES_BLOCK_SIZE];
memcpy(iv, req->iv, GCM_AES_IV_SIZE);
return gcm_encrypt(req, iv, req->assoclen);
}
/*
 * gcm_aes_decrypt() - plain gcm(aes) decrypt entry point
 * @req: AEAD request carrying the 12-byte IV in req->iv
 *
 * Mirror of gcm_aes_encrypt(): stage the 96-bit IV into an AES-block
 * buffer, then run the shared GCM decrypt path.
 */
static int gcm_aes_decrypt(struct aead_request *req)
{
	u8 iv[AES_BLOCK_SIZE];

	/* The caller provides the full 96-bit GCM IV for gcm(aes). */
	memcpy(iv, req->iv, GCM_AES_IV_SIZE);

	return gcm_decrypt(req, iv, req->assoclen);
}
static int rfc4106_setkey(struct crypto_aead *tfm, const u8 *inkey,
unsigned int keylen)
{
struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
int err;
keylen -= RFC4106_NONCE_SIZE;
err = gcm_aes_setkey(tfm, inkey, keylen);
if (err)
return err;
memcpy(ctx->nonce, inkey + keylen, RFC4106_NONCE_SIZE);
return 0;
}
/*
 * rfc4106_setauthsize() - validate the requested RFC 4106 ICV length
 * @tfm:      the AEAD transform (unused here)
 * @authsize: tag length in bytes requested by the caller
 *
 * RFC 4106 permits a narrower set of tag sizes than plain GCM; the
 * generic helper encodes that policy.
 */
static int rfc4106_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
/*
 * rfc4106_encrypt() - RFC 4106 (IPsec ESP) encrypt entry point
 * @req: AEAD request; req->iv holds the 8-byte per-packet IV
 *
 * Builds the GCM IV as the 4-byte salt saved at setkey time followed by
 * the 8-byte explicit IV from the request, validates the AAD length for
 * IPsec, and runs the shared GCM core with the explicit IV excluded
 * from the authenticated associated data.
 */
static int rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	u8 iv[AES_BLOCK_SIZE];
	int err;

	/* IV = salt (from the key) || explicit per-request IV. */
	memcpy(iv, ctx->nonce, RFC4106_NONCE_SIZE);
	memcpy(iv + RFC4106_NONCE_SIZE, req->iv, GCM_RFC4106_IV_SIZE);

	err = crypto_ipsec_check_assoclen(req->assoclen);
	if (err)
		return err;

	return gcm_encrypt(req, iv, req->assoclen - GCM_RFC4106_IV_SIZE);
}
/*
 * rfc4106_decrypt() - RFC 4106 (IPsec ESP) decrypt entry point
 * @req: AEAD request; req->iv holds the 8-byte per-packet IV
 *
 * Mirror of rfc4106_encrypt(): assemble salt || explicit IV, check the
 * IPsec AAD length, then decrypt with the explicit IV excluded from the
 * authenticated associated data.
 */
static int rfc4106_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	u8 iv[AES_BLOCK_SIZE];
	int err;

	/* IV = salt (from the key) || explicit per-request IV. */
	memcpy(iv, ctx->nonce, RFC4106_NONCE_SIZE);
	memcpy(iv + RFC4106_NONCE_SIZE, req->iv, GCM_RFC4106_IV_SIZE);

	err = crypto_ipsec_check_assoclen(req->assoclen);
	if (err)
		return err;

	return gcm_decrypt(req, iv, req->assoclen - GCM_RFC4106_IV_SIZE);
}
static struct aead_alg gcm_aes_algs[] = {{
.ivsize = GCM_AES_IV_SIZE,
.chunksize = AES_BLOCK_SIZE, .chunksize = AES_BLOCK_SIZE,
.maxauthsize = AES_BLOCK_SIZE, .maxauthsize = AES_BLOCK_SIZE,
.setkey = gcm_setkey, .setkey = gcm_aes_setkey,
.setauthsize = gcm_setauthsize, .setauthsize = gcm_aes_setauthsize,
.encrypt = gcm_encrypt, .encrypt = gcm_aes_encrypt,
.decrypt = gcm_decrypt, .decrypt = gcm_aes_decrypt,
.base.cra_name = "gcm(aes)", .base.cra_name = "gcm(aes)",
.base.cra_driver_name = "gcm-aes-ce", .base.cra_driver_name = "gcm-aes-ce",
...@@ -487,7 +539,23 @@ static struct aead_alg gcm_aes_alg = { ...@@ -487,7 +539,23 @@ static struct aead_alg gcm_aes_alg = {
.base.cra_ctxsize = sizeof(struct gcm_aes_ctx) + .base.cra_ctxsize = sizeof(struct gcm_aes_ctx) +
4 * sizeof(u64[2]), 4 * sizeof(u64[2]),
.base.cra_module = THIS_MODULE, .base.cra_module = THIS_MODULE,
}; }, {
.ivsize = GCM_RFC4106_IV_SIZE,
.chunksize = AES_BLOCK_SIZE,
.maxauthsize = AES_BLOCK_SIZE,
.setkey = rfc4106_setkey,
.setauthsize = rfc4106_setauthsize,
.encrypt = rfc4106_encrypt,
.decrypt = rfc4106_decrypt,
.base.cra_name = "rfc4106(gcm(aes))",
.base.cra_driver_name = "rfc4106-gcm-aes-ce",
.base.cra_priority = 300,
.base.cra_blocksize = 1,
.base.cra_ctxsize = sizeof(struct gcm_aes_ctx) +
4 * sizeof(u64[2]),
.base.cra_module = THIS_MODULE,
}};
static int __init ghash_ce_mod_init(void) static int __init ghash_ce_mod_init(void)
{ {
...@@ -495,7 +563,8 @@ static int __init ghash_ce_mod_init(void) ...@@ -495,7 +563,8 @@ static int __init ghash_ce_mod_init(void)
return -ENODEV; return -ENODEV;
if (cpu_have_named_feature(PMULL)) if (cpu_have_named_feature(PMULL))
return crypto_register_aead(&gcm_aes_alg); return crypto_register_aeads(gcm_aes_algs,
ARRAY_SIZE(gcm_aes_algs));
return crypto_register_shash(&ghash_alg); return crypto_register_shash(&ghash_alg);
} }
...@@ -503,7 +572,7 @@ static int __init ghash_ce_mod_init(void) ...@@ -503,7 +572,7 @@ static int __init ghash_ce_mod_init(void)
/*
 * ghash_ce_mod_exit() - module teardown
 *
 * Unregister whichever algorithms ghash_ce_mod_init() registered:
 * the AEAD pair when PMULL was available, the bare ghash shash
 * otherwise.
 */
static void __exit ghash_ce_mod_exit(void)
{
	if (!cpu_have_named_feature(PMULL)) {
		/* Only the ghash shash was registered on this CPU. */
		crypto_unregister_shash(&ghash_alg);
		return;
	}

	crypto_unregister_aeads(gcm_aes_algs, ARRAY_SIZE(gcm_aes_algs));
}
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment