Commit c59de48e authored by Tianjia Zhang, committed by Herbert Xu

crypto: arm64/sm4-ce - Make dependent on sm4 library instead of sm4-generic

The SM4 library was abstracted out of the sm4-generic algorithm, so sm4-ce can now depend on
the SM4 library instead of sm4-generic, and some functions in sm4-generic
no longer need to be exported.
Signed-off-by: Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent 2b31277a
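
For context, this change switches sm4-ce from the exported crypto_sm4_* helpers to the plain SM4 library interface declared in <crypto/sm4.h> (see the header hunks below). The following is a minimal sketch of how a kernel-side caller uses that library interface; the helper name and buffers are made up for illustration and are not part of this commit:

#include <crypto/sm4.h>	/* struct sm4_ctx, sm4_expandkey(), sm4_crypt_block() */

/* Hypothetical helper, for illustration only. */
static int sm4_lib_example(const u8 key[SM4_KEY_SIZE],
			   const u8 in[SM4_BLOCK_SIZE],
			   u8 out[SM4_BLOCK_SIZE])
{
	struct sm4_ctx ctx;
	int err;

	/* Expand the 128-bit key into encryption and decryption round keys. */
	err = sm4_expandkey(&ctx, key, SM4_KEY_SIZE);
	if (err)
		return err;		/* -EINVAL for an invalid key length */

	/* Encrypt a single 16-byte block with the portable C implementation. */
	sm4_crypt_block(ctx.rkey_enc, out, in);

	/* For decryption, the same routine is called with ctx.rkey_dec. */
	return 0;
}
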
--- a/arch/arm64/crypto/Kconfig
+++ b/arch/arm64/crypto/Kconfig
@@ -51,7 +51,7 @@ config CRYPTO_SM4_ARM64_CE
 	tristate "SM4 symmetric cipher (ARMv8.2 Crypto Extensions)"
 	depends on KERNEL_MODE_NEON
 	select CRYPTO_ALGAPI
-	select CRYPTO_SM4
+	select CRYPTO_LIB_SM4
 
 config CRYPTO_GHASH_ARM64_CE
 	tristate "GHASH/AES-GCM using ARMv8 Crypto Extensions"
--- a/arch/arm64/crypto/sm4-ce-glue.c
+++ b/arch/arm64/crypto/sm4-ce-glue.c
@@ -17,12 +17,20 @@ MODULE_LICENSE("GPL v2");
 
 asmlinkage void sm4_ce_do_crypt(const u32 *rk, void *out, const void *in);
 
+static int sm4_ce_setkey(struct crypto_tfm *tfm, const u8 *key,
+			 unsigned int key_len)
+{
+	struct sm4_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	return sm4_expandkey(ctx, key, key_len);
+}
+
 static void sm4_ce_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
+	const struct sm4_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	if (!crypto_simd_usable()) {
-		crypto_sm4_encrypt(tfm, out, in);
+		sm4_crypt_block(ctx->rkey_enc, out, in);
 	} else {
 		kernel_neon_begin();
 		sm4_ce_do_crypt(ctx->rkey_enc, out, in);
@@ -32,10 +40,10 @@ static void sm4_ce_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 
 static void sm4_ce_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
+	const struct sm4_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	if (!crypto_simd_usable()) {
-		crypto_sm4_decrypt(tfm, out, in);
+		sm4_crypt_block(ctx->rkey_dec, out, in);
 	} else {
 		kernel_neon_begin();
 		sm4_ce_do_crypt(ctx->rkey_dec, out, in);
@@ -49,12 +57,12 @@ static struct crypto_alg sm4_ce_alg = {
 	.cra_priority		= 200,
 	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 	.cra_blocksize		= SM4_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct crypto_sm4_ctx),
+	.cra_ctxsize		= sizeof(struct sm4_ctx),
 	.cra_module		= THIS_MODULE,
 	.cra_u.cipher = {
 		.cia_min_keysize	= SM4_KEY_SIZE,
 		.cia_max_keysize	= SM4_KEY_SIZE,
-		.cia_setkey		= crypto_sm4_set_key,
+		.cia_setkey		= sm4_ce_setkey,
 		.cia_encrypt		= sm4_ce_encrypt,
 		.cia_decrypt		= sm4_ce_decrypt
 	}
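
The encrypt/decrypt hunks above keep the usual arm64 pattern for SIMD-accelerated ciphers: the Crypto Extensions routine may only run between kernel_neon_begin() and kernel_neon_end(), and contexts where kernel-mode NEON is unusable fall back to the portable C library. A commented sketch of that shape, assuming the driver's existing includes; the wrapper name is hypothetical:

/* Assumes <asm/neon.h>, <crypto/internal/simd.h> and <crypto/sm4.h>, as
 * already included by sm4-ce-glue.c; sm4_ce_crypt_one() is a made-up name
 * used only for this sketch. */
static void sm4_ce_crypt_one(const struct sm4_ctx *ctx, u8 *out, const u8 *in)
{
	if (!crypto_simd_usable()) {
		/* FP/SIMD state cannot be touched here (e.g. certain
		 * interrupt contexts), so use the C library fallback. */
		sm4_crypt_block(ctx->rkey_enc, out, in);
	} else {
		/* Claim the NEON/CE registers, run the accelerated
		 * routine, then release them. */
		kernel_neon_begin();
		sm4_ce_do_crypt(ctx->rkey_enc, out, in);
		kernel_neon_end();
	}
}
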
--- a/crypto/sm4_generic.c
+++ b/crypto/sm4_generic.c
@@ -17,45 +17,42 @@
 #include <asm/unaligned.h>
 
 /**
- * crypto_sm4_set_key - Set the SM4 key.
+ * sm4_setkey - Set the SM4 key.
  * @tfm:	The %crypto_tfm that is used in the context.
  * @in_key:	The input key.
  * @key_len:	The size of the key.
  *
  * This function uses sm4_expandkey() to expand the key.
- * &crypto_sm4_ctx _must_ be the private data embedded in @tfm which is
+ * &sm4_ctx _must_ be the private data embedded in @tfm which is
  * retrieved with crypto_tfm_ctx().
  *
  * Return: 0 on success; -EINVAL on failure (only happens for bad key lengths)
  */
-int crypto_sm4_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+static int sm4_setkey(struct crypto_tfm *tfm, const u8 *in_key,
 		       unsigned int key_len)
 {
-	struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct sm4_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	return sm4_expandkey(ctx, in_key, key_len);
 }
-EXPORT_SYMBOL_GPL(crypto_sm4_set_key);
 
 /* encrypt a block of text */
 
-void crypto_sm4_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void sm4_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
+	const struct sm4_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	sm4_crypt_block(ctx->rkey_enc, out, in);
 }
-EXPORT_SYMBOL_GPL(crypto_sm4_encrypt);
 
 /* decrypt a block of text */
 
-void crypto_sm4_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
+static void sm4_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
+	const struct sm4_ctx *ctx = crypto_tfm_ctx(tfm);
 
 	sm4_crypt_block(ctx->rkey_dec, out, in);
 }
-EXPORT_SYMBOL_GPL(crypto_sm4_decrypt);
 
 static struct crypto_alg sm4_alg = {
 	.cra_name		= "sm4",
@@ -63,15 +60,15 @@ static struct crypto_alg sm4_alg = {
 	.cra_priority		= 100,
 	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 	.cra_blocksize		= SM4_BLOCK_SIZE,
-	.cra_ctxsize		= sizeof(struct crypto_sm4_ctx),
+	.cra_ctxsize		= sizeof(struct sm4_ctx),
 	.cra_module		= THIS_MODULE,
 	.cra_u			= {
 		.cipher = {
 			.cia_min_keysize	= SM4_KEY_SIZE,
 			.cia_max_keysize	= SM4_KEY_SIZE,
-			.cia_setkey		= crypto_sm4_set_key,
-			.cia_encrypt		= crypto_sm4_encrypt,
-			.cia_decrypt		= crypto_sm4_decrypt
+			.cia_setkey		= sm4_setkey,
+			.cia_encrypt		= sm4_encrypt,
+			.cia_decrypt		= sm4_decrypt
 		}
 	}
 };
--- a/include/crypto/sm4.h
+++ b/include/crypto/sm4.h
@@ -16,7 +16,7 @@
 #define SM4_BLOCK_SIZE	16
 #define SM4_RKEY_WORDS	32
 
-struct crypto_sm4_ctx {
+struct sm4_ctx {
 	u32 rkey_enc[SM4_RKEY_WORDS];
 	u32 rkey_dec[SM4_RKEY_WORDS];
 };
@@ -30,7 +30,7 @@ struct crypto_sm4_ctx {
  * Returns 0 on success. The function fails only if an invalid key size (or
  * pointer) is supplied.
  */
-int sm4_expandkey(struct crypto_sm4_ctx *ctx, const u8 *in_key,
+int sm4_expandkey(struct sm4_ctx *ctx, const u8 *in_key,
 		  unsigned int key_len);
 
 /**
@@ -41,9 +41,4 @@ int sm4_expandkey(struct crypto_sm4_ctx *ctx, const u8 *in_key,
  */
 void sm4_crypt_block(const u32 *rk, u8 *out, const u8 *in);
 
-int crypto_sm4_set_key(struct crypto_tfm *tfm, const u8 *in_key,
-		       unsigned int key_len);
-void crypto_sm4_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in);
-void crypto_sm4_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in);
-
 #endif
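
Pulling the header hunks together, the public surface of <crypto/sm4.h> after this patch is reduced to the context structure and the two library routines. The following is a sketch assembled from the hunks above (kernel-doc comments are elided; SM4_KEY_SIZE comes from an untouched part of the header and is assumed here):

/* Sketch of include/crypto/sm4.h after this patch. */
#define SM4_BLOCK_SIZE	16
#define SM4_RKEY_WORDS	32

struct sm4_ctx {
	u32 rkey_enc[SM4_RKEY_WORDS];
	u32 rkey_dec[SM4_RKEY_WORDS];
};

/* Expands a 128-bit key; returns 0, or -EINVAL for a bad key length. */
int sm4_expandkey(struct sm4_ctx *ctx, const u8 *in_key,
		  unsigned int key_len);

/* Processes one 16-byte block with the given round keys
 * (rkey_enc to encrypt, rkey_dec to decrypt). */
void sm4_crypt_block(const u32 *rk, u8 *out, const u8 *in);
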
--- a/lib/crypto/sm4.c
+++ b/lib/crypto/sm4.c
@@ -108,7 +108,7 @@ static inline u32 sm4_round(u32 x0, u32 x1, u32 x2, u32 x3, u32 rk)
  * Returns 0 on success. The function fails only if an invalid key size (or
  * pointer) is supplied.
  */
-int sm4_expandkey(struct crypto_sm4_ctx *ctx, const u8 *in_key,
+int sm4_expandkey(struct sm4_ctx *ctx, const u8 *in_key,
 		  unsigned int key_len)
 {
 	u32 rk[4];