Commit 1085680b authored by Herbert Xu

crypto: skcipher - Count error stats differently

Move all stat code specific to skcipher into the skcipher code.

While we're at it, change the stats so that bytes and counts
are always incremented even in case of error.  This allows the
reference counting to be removed as we can now increment the
counters prior to the operation.

After the operation we simply increase the error count if necessary.
This is safe as errors can only occur synchronously (or rather,
the existing code already ignored asynchronous errors which are
only visible to the callback function).
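
In other words, the counting pattern becomes the following (a minimal sketch in kernel-style C, not part of the patch itself; sketch_encrypt and op are placeholder names standing in for crypto_skcipher_encrypt and the algorithm's ->encrypt hook):

	static int sketch_encrypt(struct skcipher_request *req,
				  struct crypto_istat_cipher *istat,
				  int (*op)(struct skcipher_request *))
	{
		int err;

		/* Count the request and its bytes up front, before the
		 * operation runs, so no reference needs to be held
		 * across it. */
		atomic64_inc(&istat->encrypt_cnt);
		atomic64_add(req->cryptlen, &istat->encrypt_tlen);

		err = op(req);

		/* Only synchronous errors reach this point; -EINPROGRESS
		 * and -EBUSY mean the request went asynchronous and its
		 * eventual status is reported to the completion callback
		 * instead. */
		if (err && err != -EINPROGRESS && err != -EBUSY)
			atomic64_inc(&istat->err_cnt);

		return err;
	}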
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent e2950bf1
@@ -1073,32 +1073,6 @@ void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
 	crypto_alg_put(alg);
 }
 EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
-
-void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
-				   struct crypto_alg *alg)
-{
-	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-		atomic64_inc(&alg->stats.cipher.err_cnt);
-	} else {
-		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
-		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
-	}
-	crypto_alg_put(alg);
-}
-EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);
-
-void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
-				   struct crypto_alg *alg)
-{
-	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
-		atomic64_inc(&alg->stats.cipher.err_cnt);
-	} else {
-		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
-		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
-	}
-	crypto_alg_put(alg);
-}
-EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
 #endif
 
 static void __init crypto_start_tests(void)
......
@@ -11,7 +11,6 @@
 #include <linux/sched.h>
 #include <net/netlink.h>
 #include <net/sock.h>
-#include <crypto/internal/skcipher.h>
 #include <crypto/internal/rng.h>
 #include <crypto/internal/cryptouser.h>
@@ -34,12 +33,6 @@ static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
 	strscpy(rcipher.type, "cipher", sizeof(rcipher.type));
 
-	rcipher.stat_encrypt_cnt = atomic64_read(&alg->stats.cipher.encrypt_cnt);
-	rcipher.stat_encrypt_tlen = atomic64_read(&alg->stats.cipher.encrypt_tlen);
-	rcipher.stat_decrypt_cnt = atomic64_read(&alg->stats.cipher.decrypt_cnt);
-	rcipher.stat_decrypt_tlen = atomic64_read(&alg->stats.cipher.decrypt_tlen);
-	rcipher.stat_err_cnt = atomic64_read(&alg->stats.cipher.err_cnt);
-
 	return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher);
 }
@@ -106,10 +99,6 @@ static int crypto_reportstat_one(struct crypto_alg *alg,
 	}
 
 	switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
-	case CRYPTO_ALG_TYPE_SKCIPHER:
-		if (crypto_report_cipher(skb, alg))
-			goto nla_put_failure;
-		break;
 	case CRYPTO_ALG_TYPE_CIPHER:
 		if (crypto_report_cipher(skb, alg))
 			goto nla_put_failure;
......
@@ -15,11 +15,14 @@
 #include <crypto/scatterwalk.h>
 #include <linux/bug.h>
 #include <linux/cryptouser.h>
-#include <linux/compiler.h>
+#include <linux/err.h>
+#include <linux/kernel.h>
 #include <linux/list.h>
+#include <linux/mm.h>
 #include <linux/module.h>
-#include <linux/rtnetlink.h>
 #include <linux/seq_file.h>
+#include <linux/slab.h>
+#include <linux/string.h>
 #include <net/netlink.h>
 
 #include "internal.h"
@@ -77,6 +80,35 @@ static inline u8 *skcipher_get_spot(u8 *start, unsigned int len)
 	return max(start, end_page);
 }
 
+static inline struct skcipher_alg *__crypto_skcipher_alg(
+	struct crypto_alg *alg)
+{
+	return container_of(alg, struct skcipher_alg, base);
+}
+
+static inline struct crypto_istat_cipher *skcipher_get_stat(
+	struct skcipher_alg *alg)
+{
+#ifdef CONFIG_CRYPTO_STATS
+	return &alg->stat;
+#else
+	return NULL;
+#endif
+}
+
+static inline int crypto_skcipher_errstat(struct skcipher_alg *alg, int err)
+{
+	struct crypto_istat_cipher *istat = skcipher_get_stat(alg);
+
+	if (!IS_ENABLED(CONFIG_CRYPTO_STATS))
+		return err;
+
+	if (err && err != -EINPROGRESS && err != -EBUSY)
+		atomic64_inc(&istat->err_cnt);
+
+	return err;
+}
+
 static int skcipher_done_slow(struct skcipher_walk *walk, unsigned int bsize)
 {
 	u8 *addr;
@@ -605,34 +637,44 @@ EXPORT_SYMBOL_GPL(crypto_skcipher_setkey);
 int crypto_skcipher_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-	struct crypto_alg *alg = tfm->base.__crt_alg;
-	unsigned int cryptlen = req->cryptlen;
+	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
 	int ret;
 
-	crypto_stats_get(alg);
+	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
+		struct crypto_istat_cipher *istat = skcipher_get_stat(alg);
+
+		atomic64_inc(&istat->encrypt_cnt);
+		atomic64_add(req->cryptlen, &istat->encrypt_tlen);
+	}
+
 	if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
 		ret = -ENOKEY;
 	else
-		ret = crypto_skcipher_alg(tfm)->encrypt(req);
-	crypto_stats_skcipher_encrypt(cryptlen, ret, alg);
-	return ret;
+		ret = alg->encrypt(req);
+
+	return crypto_skcipher_errstat(alg, ret);
 }
 EXPORT_SYMBOL_GPL(crypto_skcipher_encrypt);
 
 int crypto_skcipher_decrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-	struct crypto_alg *alg = tfm->base.__crt_alg;
-	unsigned int cryptlen = req->cryptlen;
+	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
 	int ret;
 
-	crypto_stats_get(alg);
+	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
+		struct crypto_istat_cipher *istat = skcipher_get_stat(alg);
+
+		atomic64_inc(&istat->decrypt_cnt);
+		atomic64_add(req->cryptlen, &istat->decrypt_tlen);
+	}
+
 	if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
 		ret = -ENOKEY;
 	else
-		ret = crypto_skcipher_alg(tfm)->decrypt(req);
-	crypto_stats_skcipher_decrypt(cryptlen, ret, alg);
-	return ret;
+		ret = alg->decrypt(req);
+
+	return crypto_skcipher_errstat(alg, ret);
 }
 EXPORT_SYMBOL_GPL(crypto_skcipher_decrypt);
@@ -672,8 +714,7 @@ static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
 	__maybe_unused;
 static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
 {
-	struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg,
-						     base);
+	struct skcipher_alg *skcipher = __crypto_skcipher_alg(alg);
 
 	seq_printf(m, "type         : skcipher\n");
 	seq_printf(m, "async        : %s\n",
@@ -689,9 +730,8 @@ static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
 #ifdef CONFIG_NET
 static int crypto_skcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
 {
+	struct skcipher_alg *skcipher = __crypto_skcipher_alg(alg);
 	struct crypto_report_blkcipher rblkcipher;
-	struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg,
-						     base);
 
 	memset(&rblkcipher, 0, sizeof(rblkcipher));
@@ -713,6 +753,28 @@ static int crypto_skcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
 }
 #endif
 
+static int __maybe_unused crypto_skcipher_report_stat(
+	struct sk_buff *skb, struct crypto_alg *alg)
+{
+	struct skcipher_alg *skcipher = __crypto_skcipher_alg(alg);
+	struct crypto_istat_cipher *istat;
+	struct crypto_stat_cipher rcipher;
+
+	istat = skcipher_get_stat(skcipher);
+
+	memset(&rcipher, 0, sizeof(rcipher));
+
+	strscpy(rcipher.type, "cipher", sizeof(rcipher.type));
+
+	rcipher.stat_encrypt_cnt = atomic64_read(&istat->encrypt_cnt);
+	rcipher.stat_encrypt_tlen = atomic64_read(&istat->encrypt_tlen);
+	rcipher.stat_decrypt_cnt = atomic64_read(&istat->decrypt_cnt);
+	rcipher.stat_decrypt_tlen = atomic64_read(&istat->decrypt_tlen);
+	rcipher.stat_err_cnt = atomic64_read(&istat->err_cnt);
+
+	return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher);
+}
+
 static const struct crypto_type crypto_skcipher_type = {
 	.extsize = crypto_alg_extsize,
 	.init_tfm = crypto_skcipher_init_tfm,
@@ -721,6 +783,9 @@ static const struct crypto_type crypto_skcipher_type = {
 	.show = crypto_skcipher_show,
 #endif
 	.report = crypto_skcipher_report,
+#ifdef CONFIG_CRYPTO_STATS
+	.report_stat = crypto_skcipher_report_stat,
+#endif
 	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
 	.maskset = CRYPTO_ALG_TYPE_MASK,
 	.type = CRYPTO_ALG_TYPE_SKCIPHER,
@@ -775,6 +840,7 @@ EXPORT_SYMBOL_GPL(crypto_has_skcipher);
 
 static int skcipher_prepare_alg(struct skcipher_alg *alg)
 {
+	struct crypto_istat_cipher *istat = skcipher_get_stat(alg);
 	struct crypto_alg *base = &alg->base;
 
 	if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8 ||
@@ -790,6 +856,9 @@ static int skcipher_prepare_alg(struct skcipher_alg *alg)
 	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
 	base->cra_flags |= CRYPTO_ALG_TYPE_SKCIPHER;
 
+	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
+		memset(istat, 0, sizeof(*istat));
+
 	return 0;
 }
......
@@ -8,6 +8,7 @@
 #ifndef _CRYPTO_SKCIPHER_H
 #define _CRYPTO_SKCIPHER_H
 
+#include <linux/atomic.h>
 #include <linux/container_of.h>
 #include <linux/crypto.h>
 #include <linux/slab.h>
@@ -48,6 +49,22 @@ struct crypto_sync_skcipher {
 	struct crypto_skcipher base;
 };
 
+/*
+ * struct crypto_istat_cipher - statistics for cipher algorithm
+ * @encrypt_cnt: number of encrypt requests
+ * @encrypt_tlen: total data size handled by encrypt requests
+ * @decrypt_cnt: number of decrypt requests
+ * @decrypt_tlen: total data size handled by decrypt requests
+ * @err_cnt: number of error for cipher requests
+ */
+struct crypto_istat_cipher {
+	atomic64_t encrypt_cnt;
+	atomic64_t encrypt_tlen;
+	atomic64_t decrypt_cnt;
+	atomic64_t decrypt_tlen;
+	atomic64_t err_cnt;
+};
+
 /**
  * struct skcipher_alg - symmetric key cipher definition
  * @min_keysize: Minimum key size supported by the transformation. This is the
@@ -101,6 +118,7 @@ struct crypto_sync_skcipher {
 * @walksize: Equal to the chunk size except in cases where the algorithm is
 *	      considerably more efficient if it can operate on multiple chunks
 *	      in parallel. Should be a multiple of chunksize.
+ * @stat: Statistics for cipher algorithm
 * @base: Definition of a generic crypto algorithm.
 *
 * All fields except @ivsize are mandatory and must be filled.
@@ -119,6 +137,10 @@ struct skcipher_alg {
 	unsigned int chunksize;
 	unsigned int walksize;
 
+#ifdef CONFIG_CRYPTO_STATS
+	struct crypto_istat_cipher stat;
+#endif
+
 	struct crypto_alg base;
 };
......
@@ -276,22 +276,6 @@ struct compress_alg {
 };
 
 #ifdef CONFIG_CRYPTO_STATS
-/*
- * struct crypto_istat_cipher - statistics for cipher algorithm
- * @encrypt_cnt: number of encrypt requests
- * @encrypt_tlen: total data size handled by encrypt requests
- * @decrypt_cnt: number of decrypt requests
- * @decrypt_tlen: total data size handled by decrypt requests
- * @err_cnt: number of error for cipher requests
- */
-struct crypto_istat_cipher {
-	atomic64_t encrypt_cnt;
-	atomic64_t encrypt_tlen;
-	atomic64_t decrypt_cnt;
-	atomic64_t decrypt_tlen;
-	atomic64_t err_cnt;
-};
-
 /*
  * struct crypto_istat_rng: statistics for RNG algorithm
  * @generate_cnt: number of RNG generate requests
@@ -385,7 +369,6 @@ struct crypto_istat_rng {
 * @cra_destroy: internally used
 *
 * @stats: union of all possible crypto_istat_xxx structures
- * @stats.cipher: statistics for cipher algorithm
 * @stats.rng: statistics for rng algorithm
 *
 * The struct crypto_alg describes a generic Crypto API algorithm and is common
@@ -422,7 +405,6 @@ struct crypto_alg {
 #ifdef CONFIG_CRYPTO_STATS
 	union {
-		struct crypto_istat_cipher cipher;
 		struct crypto_istat_rng rng;
 	} stats;
 #endif /* CONFIG_CRYPTO_STATS */
@@ -434,8 +416,6 @@ void crypto_stats_init(struct crypto_alg *alg);
 void crypto_stats_get(struct crypto_alg *alg);
 void crypto_stats_rng_seed(struct crypto_alg *alg, int ret);
 void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret);
-void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
-void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
 #else
 static inline void crypto_stats_init(struct crypto_alg *alg)
 {}
@@ -445,10 +425,6 @@ static inline void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
 {}
 static inline void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret)
 {}
-static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
-{}
-static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
-{}
 #endif
 
 /*
 * A helper struct for waiting for completion of async crypto ops
......